diff --git a/.scalafmt.conf b/.scalafmt.conf index 5ade6cc4c..6eb841a4d 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,5 +1,5 @@ style = default -align = some +align = none maxColumn = 120 docstrings = JavaDoc optIn.breakChainOnFirstMethodDot = true @@ -12,4 +12,3 @@ rewrite.rules = [ RedundantParens, PreferCurlyFors ] -align.tokens = ["|", "!", "!!", "||", "=>", "=", "->", "<-", "|@|", "//", "/", "+"] diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/EtlException.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/EtlException.scala index 12af69b9a..998d199c2 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/EtlException.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/EtlException.scala @@ -12,9 +12,7 @@ */ package com.snowplowanalytics.snowplow.enrich.common -// Scalaz import scalaz._ -import Scalaz._ /** * The parent for our ETL-specific exceptions diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/EtlPipeline.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/EtlPipeline.scala index 26613d0c1..09eccce7b 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/EtlPipeline.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/EtlPipeline.scala @@ -10,28 +10,17 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common -// Java import java.io.{PrintWriter, StringWriter} -// Joda -import org.joda.time.DateTime - -// Iglu -import iglu.client.Resolver - -// Scala import scala.util.control.NonFatal -// Scalaz +import com.snowplowanalytics.iglu.client.Resolver +import org.joda.time.DateTime import scalaz._ import Scalaz._ -// This project import adapters.AdapterRegistry import enrichments.{EnrichmentManager, EnrichmentRegistry} import outputs.EnrichedEvent @@ -73,9 +62,9 @@ object EtlPipeline { def flattenToList[A](v: Validated[Option[Validated[NonEmptyList[Validated[A]]]]]): List[Validated[A]] = v match { case Success(Some(Success(nel))) => nel.toList - case Success(Some(Failure(f))) => List(f.fail) - case Failure(f) => List(f.fail) - case Success(None) => Nil + case Success(Some(Failure(f))) => List(f.fail) + case Failure(f) => List(f.fail) + case Success(None) => Nil } try { diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/AdapterRegistry.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/AdapterRegistry.scala index 1ddc2143f..47b480395 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/AdapterRegistry.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/AdapterRegistry.scala @@ -10,24 +10,16 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters -import iglu.client.Resolver - -// Scalaz +import com.snowplowanalytics.iglu.client.Resolver import scalaz._ import Scalaz._ -// This project import loaders.CollectorPayload -import registry.snowplow.{Tp1Adapter => SpTp1Adapter} -import registry.snowplow.{Tp2Adapter => SpTp2Adapter} -import registry.snowplow.{RedirectAdapter => SpRedirectAdapter} import registry._ +import registry.snowplow.{Tp1Adapter => SpTp1Adapter, Tp2Adapter => SpTp2Adapter, RedirectAdapter => SpRedirectAdapter} /** * The AdapterRegistry lets us convert a CollectorPayload @@ -79,24 +71,56 @@ class AdapterRegistry(remoteAdapters: Map[(String, String), RemoteAdapter] = Map } private object Vendor { - val Snowplow = "com.snowplowanalytics.snowplow" - val Redirect = "r" - val Iglu = "com.snowplowanalytics.iglu" - val Callrail = "com.callrail" - val Cloudfront = "com.amazon.aws.cloudfront" + val Snowplow = "com.snowplowanalytics.snowplow" + val Redirect = "r" + val Iglu = "com.snowplowanalytics.iglu" + val Callrail = "com.callrail" + val Cloudfront = "com.amazon.aws.cloudfront" val GoogleAnalytics = "com.google.analytics" - val Mailchimp = "com.mailchimp" - val Mailgun = "com.mailgun" - val Mandrill = "com.mandrill" - val Olark = "com.olark" - val Pagerduty = "com.pagerduty" - val Pingdom = "com.pingdom" - val Sendgrid = "com.sendgrid" - val StatusGator = "com.statusgator" - val Unbounce = "com.unbounce" - val UrbanAirship = "com.urbanairship.connect" - val Marketo = "com.marketo" - val Vero = "com.getvero" - val HubSpot = "com.hubspot" + val Mailchimp = "com.mailchimp" + val Mailgun = "com.mailgun" + val Mandrill = "com.mandrill" + val Olark = "com.olark" + val Pagerduty = "com.pagerduty" + val Pingdom = "com.pingdom" + val Sendgrid = "com.sendgrid" + val StatusGator = "com.statusgator" + val Unbounce = "com.unbounce" + val UrbanAirship = "com.urbanairship.connect" + val Marketo = "com.marketo" + val Vero = "com.getvero" + val HubSpot = "com.hubspot" } + + /** + * Router to determine which adapter to use + * @param payload The CollectorPayload we are transforming + * @param resolver (implicit) The Iglu resolver used for schema lookup and validation + * @return either a NEL of RawEvents on Success, or a NEL of Strings on Failure + */ + def toRawEvents(payload: CollectorPayload)(implicit resolver: Resolver): ValidatedRawEvents = + (payload.api.vendor, payload.api.version) match { + case (Vendor.Snowplow, "tp1") => SpTp1Adapter.toRawEvents(payload) + case (Vendor.Snowplow, "tp2") => SpTp2Adapter.toRawEvents(payload) + case (Vendor.Redirect, "tp2") => SpRedirectAdapter.toRawEvents(payload) + case (Vendor.Iglu, "v1") => IgluAdapter.toRawEvents(payload) + case (Vendor.Callrail, "v1") => CallrailAdapter.toRawEvents(payload) + case (Vendor.Cloudfront, "wd_access_log") => CloudfrontAccessLogAdapter.WebDistribution.toRawEvents(payload) + case (Vendor.Mailchimp, "v1") => MailchimpAdapter.toRawEvents(payload) + case (Vendor.Mailgun, "v1") => MailgunAdapter.toRawEvents(payload) + case (Vendor.GoogleAnalytics, "v1") => GoogleAnalyticsAdapter.toRawEvents(payload) + case (Vendor.Mandrill, "v1") => MandrillAdapter.toRawEvents(payload) + case (Vendor.Olark, "v1") => OlarkAdapter.toRawEvents(payload) + case (Vendor.Pagerduty, "v1") => PagerdutyAdapter.toRawEvents(payload) + case (Vendor.Pingdom, "v1") => PingdomAdapter.toRawEvents(payload) + case (Vendor.Sendgrid, "v3") => 
SendgridAdapter.toRawEvents(payload) + case (Vendor.StatusGator, "v1") => StatusGatorAdapter.toRawEvents(payload) + case (Vendor.Unbounce, "v1") => UnbounceAdapter.toRawEvents(payload) + case (Vendor.UrbanAirship, "v1") => UrbanAirshipAdapter.toRawEvents(payload) + case (Vendor.Marketo, "v1") => MarketoAdapter.toRawEvents(payload) + case (Vendor.Vero, "v1") => VeroAdapter.toRawEvents(payload) + case (Vendor.HubSpot, "v1") => HubSpotAdapter.toRawEvents(payload) + case _ => + s"Payload with vendor ${payload.api.vendor} and version ${payload.api.version} not supported by this version of Scala Common Enrich".failNel + } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/RawEvent.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/RawEvent.scala index 64d951b8a..1b990d058 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/RawEvent.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/RawEvent.scala @@ -13,7 +13,6 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters -// This project import loaders.{CollectorApi, CollectorContext, CollectorSource} /** diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/Adapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/Adapter.scala index bad5fa1c3..74fc95082 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/Adapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/Adapter.scala @@ -10,39 +10,26 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Apache URLEncodedUtils -import com.snowplowanalytics.snowplow.enrich.common.adapters.registry.MandrillAdapter._ +import scala.util.control.NonFatal + +import com.fasterxml.jackson.core.JsonParseException +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} import org.apache.http.NameValuePair import org.joda.time.{DateTime, DateTimeZone} import org.joda.time.format.DateTimeFormat - -// Iglu -import iglu.client.{Resolver, SchemaKey} - -// Scalaz import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -import com.fasterxml.jackson.core.JsonParseException -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} -// errors -import scala.util.control.NonFatal - trait Adapter { // The Iglu schema URI for a Snowplow unstructured event @@ -82,9 +69,10 @@ trait Adapter { * @return the updated JSON with valid date-time * values in the tsFieldKey fields */ - private[registry] def cleanupJsonEventValues(json: JValue, - eventOpt: Option[(String, String)], - tsFieldKey: String): JValue = { + private[registry] def cleanupJsonEventValues( + json: JValue, + eventOpt: Option[(String, String)], + tsFieldKey: String): JValue = { def toStringField(seconds: Long): JString = { val dt: DateTime = new DateTime(seconds * 1000) @@ -92,31 +80,29 @@ trait Adapter { } val j1 = json transformField { - case (k, v) => { + case (k, v) => if (k == tsFieldKey) { v match { - case JInt(x) => { + case JInt(x) => try { (k, toStringField(x.longValue())) } catch { case NonFatal(_) => (k, JInt(x)) } - } - case JString(x) => { + case JString(x) => try { (k, toStringField(x.toLong)) } catch { case NonFatal(_) => (k, JString(x)) } - } + case x => (k, x) } } else (k, v) - } } eventOpt match { case Some((keyName, eventType)) => j1 removeField { _ == JField(keyName, eventType) } - case None => j1 + case None => j1 } } @@ -157,13 +143,13 @@ trait Adapter { * @return a formatter function which converts * RawEventParameters into a cleaned JObject */ - protected[registry] def buildFormatter(bools: List[String] = Nil, - ints: List[String] = Nil, - dateTimes: JU.DateTimeFields = None): FormatterFunc = { - (parameters: RawEventParameters) => - for { - p <- parameters.toList - } yield JU.toJField(p._1, p._2, bools, ints, dateTimes) + protected[registry] def buildFormatter( + bools: List[String] = Nil, + ints: List[String] = Nil, + dateTimes: JU.DateTimeFields = None): FormatterFunc = { (parameters: RawEventParameters) => + for { + p <- parameters.toList + } yield JU.toJField(p._1, p._2, bools, ints, dateTimes) } /** @@ -188,11 +174,12 @@ trait Adapter { * @return the raw-event parameters for a valid * Snowplow unstructured event */ - protected[registry] def toUnstructEventParams(tracker: String, - parameters: RawEventParameters, - schema: String, - formatter: FormatterFunc, - platform: String): RawEventParameters = { + protected[registry] def toUnstructEventParams( + tracker: String, + parameters: RawEventParameters, + schema: String, + formatter: FormatterFunc, + platform: String): RawEventParameters = { val params = formatter(parameters - ("nuid", "aid", "cv", "p")) @@ -204,10 +191,11 @@ trait Adapter { )) } - Map("tv" -> tracker, - "e" -> "ue", - "p" -> parameters.getOrElse("p", platform), // Required field - "ue_pr" -> json) ++ + Map( + "tv" -> tracker, + "e" 
-> "ue", + "p" -> parameters.getOrElse("p", platform), // Required field + "ue_pr" -> json) ++ parameters.filterKeys(AcceptedQueryParameters) } @@ -265,11 +253,12 @@ trait Adapter { * @return the raw-event parameters for a valid * Snowplow unstructured event */ - protected[registry] def toUnstructEventParams(tracker: String, - qsParams: RawEventParameters, - schema: String, - eventJson: JValue, - platform: String): RawEventParameters = { + protected[registry] def toUnstructEventParams( + tracker: String, + qsParams: RawEventParameters, + schema: String, + eventJson: JValue, + platform: String): RawEventParameters = { val json = compact { toUnstructEvent( @@ -278,10 +267,11 @@ trait Adapter { ) } - Map("tv" -> tracker, - "e" -> "ue", - "p" -> qsParams.getOrElse("p", platform), // Required field - "ue_pr" -> json) ++ + Map( + "tv" -> tracker, + "e" -> "ue", + "p" -> qsParams.getOrElse("p", platform), // Required field + "ue_pr" -> json) ++ qsParams.filterKeys(AcceptedQueryParameters) } @@ -312,8 +302,8 @@ trait Adapter { (successes, failures) match { case (s :: ss, Nil) => NonEmptyList(s, ss: _*).success // No Failures collected. - case (_, f :: fs) => NonEmptyList(f, fs: _*).fail // Some or all are Failures, return these. - case (Nil, Nil) => "List of events is empty (should never happen, not catching empty list properly)".failNel + case (_, f :: fs) => NonEmptyList(f, fs: _*).fail // Some or all are Failures, return these. + case (Nil, Nil) => "List of events is empty (should never happen, not catching empty list properly)".failNel } } @@ -332,9 +322,10 @@ trait Adapter { * @return the schema for the event or a Failure-boxed String * if we cannot recognize the event type */ - protected[registry] def lookupSchema(eventOpt: Option[String], - vendor: String, - eventSchemaMap: Map[String, String]): Validated[String] = + protected[registry] def lookupSchema( + eventOpt: Option[String], + vendor: String, + eventSchemaMap: Map[String, String]): Validated[String] = eventOpt match { case None => s"$vendor event failed: type parameter not provided - cannot determine event type".failNel case Some(eventType) => { @@ -369,10 +360,11 @@ trait Adapter { * @return the schema for the event or a Failure-boxed String * if we cannot recognize the event type */ - protected[registry] def lookupSchema(eventOpt: Option[String], - vendor: String, - index: Int, - eventSchemaMap: Map[String, String]): Validated[String] = + protected[registry] def lookupSchema( + eventOpt: Option[String], + vendor: String, + index: Int, + eventSchemaMap: Map[String, String]): Validated[String] = eventOpt match { case None => s"$vendor event at index [$index] failed: type parameter not provided - cannot determine event type".failNel @@ -430,6 +422,6 @@ trait Adapter { */ private[registry] def camelize(json: JValue): JValue = json.mapField { case (fieldName, JObject(jo)) => (camelCase(fieldName), camelize(jo)) - case (fieldName, jv) => (camelCase(fieldName), jv) + case (fieldName, jv) => (camelCase(fieldName), jv) } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/CallrailAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/CallrailAdapter.scala index d1eb3f4c3..04039ee94 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/CallrailAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/CallrailAdapter.scala @@ 
-10,25 +10,16 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Iglu -import iglu.client.{Resolver, SchemaKey} - -// Scalaz -import scalaz._ -import Scalaz._ - -// Joda-Time +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} import org.joda.time.DateTimeZone import org.joda.time.format.DateTimeFormat +import scalaz._ +import Scalaz._ -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} @@ -52,8 +43,8 @@ object CallrailAdapter extends Adapter { // Create a simple formatter function private val CallrailFormatter: FormatterFunc = { - val bools = List("first_call", "answered") - val ints = List("duration") + val bools = List("first_call", "answered") + val ints = List("duration") val dateTimes: JU.DateTimeFields = Some((NonEmptyList("datetime"), CallrailDateTimeFormat)) buildFormatter(bools, ints, dateTimes) } @@ -78,11 +69,11 @@ object CallrailAdapter extends Adapter { NonEmptyList( RawEvent( - api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, params, SchemaUris.CallComplete, CallrailFormatter, "srv"), + api = payload.api, + parameters = toUnstructEventParams(TrackerVersion, params, SchemaUris.CallComplete, CallrailFormatter, "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context )).success } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/CloudfrontAccessLogAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/CloudfrontAccessLogAdapter.scala index 392f40205..2b6004861 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/CloudfrontAccessLogAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/CloudfrontAccessLogAdapter.scala @@ -10,31 +10,19 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Iglu -import iglu.client.{Resolver, SchemaKey} - -// Scala import scala.util.control.NonFatal -// Scalaz +import com.snowplowanalytics.iglu.client.Resolver +import org.joda.time.DateTime import scalaz._ import Scalaz._ - -// Joda-Time -import org.joda.time.DateTime - -// json4s import org.json4s._ import org.json4s.JsonDSL._ -// This project import loaders.{CollectorContext, CollectorPayload} import utils.ConversionUtils @@ -101,16 +89,17 @@ object CloudfrontAccessLogAdapter { case 23 => "1-0-4".successNel // 01 Jul 2015 case 24 => "1-0-5".successNel // 29 Sep 2016 case 26 => "1-0-6".successNel - case n => s"Access log TSV line contained $n fields, expected 12, 15, 18, 19, 23, 24 or 26".failNel + case n => s"Access log TSV line contained $n fields, expected 12, 15, 18, 19, 23, 24 or 26".failNel } schemaVersion.flatMap { v => // Combine the first two fields into a timestamp val schemaCompatibleFields = "%sT%sZ".format(fields(0), fields(1)) :: fields.toList.tail.tail // Attempt to build the json, accumulating errors from unparseable fields - def buildJson(errors: List[String], - fields: List[(String, String)], - json: JObject): (List[String], JObject) = + def buildJson( + errors: List[String], + fields: List[(String, String)], + json: JObject): (List[String], JObject) = fields match { case Nil => (errors, json) case head :: tail => @@ -122,9 +111,10 @@ object CloudfrontAccessLogAdapter { buildJson(errors, tail, json ~ (("timeTaken", field.toDouble))) } catch { case e: NumberFormatException => - buildJson("Field [timeTaken]: cannot convert [%s] to Double".format(field) :: errors, - tail, - json) + buildJson( + "Field [timeTaken]: cannot convert [%s] to Double".format(field) :: errors, + tail, + json) } case (name, field) if name == "csBytes" || name == "scBytes" => try { @@ -149,7 +139,7 @@ object CloudfrontAccessLogAdapter { val (errors, ueJson) = buildJson(Nil, FieldNames zip schemaCompatibleFields, JObject()) val failures = errors match { - case Nil => None.successNel + case Nil => None.successNel case h :: t => (NonEmptyList(h) :::> t).fail // list to nonemptylist } @@ -157,17 +147,17 @@ object CloudfrontAccessLogAdapter { (validatedTstamp |@| failures) { (tstamp, e) => val ip = schemaCompatibleFields(3) match { - case "" => None + case "" => None case nonempty => nonempty.some } val qsParams: Map[String, String] = schemaCompatibleFields(8) match { - case "" => Map() + case "" => Map() case url => Map("url" -> url) } val userAgent = schemaCompatibleFields(9) match { - case "" => None + case "" => None case nonempty => ConversionUtils.singleEncodePcts(nonempty).some } @@ -180,11 +170,11 @@ object CloudfrontAccessLogAdapter { ) NonEmptyList( RawEvent( - api = payload.api, - parameters = parameters, + api = payload.api, + parameters = parameters, contentType = payload.contentType, - source = payload.source, - context = CollectorContext(tstamp, ip, userAgent, None, Nil, None) + source = payload.source, + context = CollectorContext(tstamp, ip, userAgent, None, Nil, None) )) } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/GoogleAnalyticsAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/GoogleAnalyticsAdapter.scala index 3c29f482a..2e1587a31 100644 --- 
a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/GoogleAnalyticsAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/GoogleAnalyticsAdapter.scala @@ -11,31 +11,23 @@ * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow.enrich.common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Java import java.net.URI -import org.apache.http.client.utils.URLEncodedUtils +import java.nio.charset.StandardCharsets.UTF_8 -// Scala import scala.collection.JavaConversions._ -// Scalaz +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} +import org.apache.http.client.utils.URLEncodedUtils import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -// Iglu -import iglu.client.{Resolver, SchemaKey} - -// This project import loaders.CollectorPayload import utils.ConversionUtils._ @@ -46,13 +38,13 @@ import utils.ConversionUtils._ object GoogleAnalyticsAdapter extends Adapter { // for failure messages - private val VendorName = "GoogleAnalytics" - private val GaVendor = "com.google.analytics" - private val Vendor = s"$GaVendor.measurement-protocol" + private val VendorName = "GoogleAnalytics" + private val GaVendor = "com.google.analytics" + private val Vendor = s"$GaVendor.measurement-protocol" private val ProtocolVersion = "v1" - private val Protocol = s"$Vendor-$ProtocolVersion" - private val Format = "jsonschema" - private val SchemaVersion = "1-0-0" + private val Protocol = s"$Vendor-$ProtocolVersion" + private val Format = "jsonschema" + private val SchemaVersion = "1-0-0" private val PageViewHitType = "pageview" @@ -84,9 +76,9 @@ object GoogleAnalyticsAdapter extends Adapter { final case class BooleanType(b: Boolean) extends FieldType implicit val fieldTypeJson4s: FieldType => JValue = (f: FieldType) => f match { - case StringType(s) => JString(s) - case IntType(i) => JInt(i) - case DoubleType(f) => JDouble(f) + case StringType(s) => JString(s) + case IntType(i) => JInt(i) + case DoubleType(f) => JDouble(f) case BooleanType(b) => JBool(b) } @@ -113,8 +105,9 @@ object GoogleAnalyticsAdapter extends Adapter { "dt" -> idTranslation("documentTitle") ) ), - "screenview" -> MPData(SchemaKey(Vendor, "screen_view", Format, SchemaVersion), - Map("cd" -> idTranslation("screenName"))), + "screenview" -> MPData( + SchemaKey(Vendor, "screen_view", Format, SchemaVersion), + Map("cd" -> idTranslation("screenName"))), "event" -> MPData( SchemaKey(Vendor, "event", Format, SchemaVersion), Map( @@ -127,13 +120,13 @@ object GoogleAnalyticsAdapter extends Adapter { "transaction" -> MPData( SchemaKey(Vendor, "transaction", Format, SchemaVersion), Map( - "ti" -> idTranslation("id"), - "ta" -> idTranslation("affiliation"), - "tr" -> twoDecimalsTranslation("revenue"), - "ts" -> twoDecimalsTranslation("shipping"), - "tt" -> twoDecimalsTranslation("tax"), + "ti" -> idTranslation("id"), + "ta" -> idTranslation("affiliation"), + "tr" -> twoDecimalsTranslation("revenue"), + "ts" -> twoDecimalsTranslation("shipping"), + "tt" -> twoDecimalsTranslation("tax"), "tcc" -> idTranslation("couponCode"), - "cu" -> idTranslation("currencyCode") + "cu" -> idTranslation("currencyCode") ) ), "item" -> MPData( @@ -186,45 +179,47 @@ object GoogleAnalyticsAdapter extends Adapter { private val contextData: 
Map[SchemaKey, Map[String, KVTranslation]] = { // pageview can be a context too val ct = unstructEventData(PageViewHitType) :: List( - MPData(SchemaKey(GaVendor, "undocumented", Format, SchemaVersion), - List("a", "jid", "gjid").map(e => e -> idTranslation(e)).toMap), + MPData( + SchemaKey(GaVendor, "undocumented", Format, SchemaVersion), + List("a", "jid", "gjid").map(e => e -> idTranslation(e)).toMap), MPData( SchemaKey(GaVendor, "private", Format, SchemaVersion), (List("_v", "_u", "_gid").map(e => e -> idTranslation(e.tail)) ++ - List("_s", "_r").map(e => e -> intTranslation(e.tail))).toMap + List("_s", "_r").map(e => e -> intTranslation(e.tail))).toMap ), MPData( SchemaKey(Vendor, "general", Format, SchemaVersion), Map( - "v" -> idTranslation("protocolVersion"), + "v" -> idTranslation("protocolVersion"), "tid" -> idTranslation("trackingId"), "aip" -> booleanTranslation("anonymizeIp"), - "ds" -> idTranslation("dataSource"), - "qt" -> intTranslation("queueTime"), - "z" -> idTranslation("cacheBuster") + "ds" -> idTranslation("dataSource"), + "qt" -> intTranslation("queueTime"), + "z" -> idTranslation("cacheBuster") ) ), - MPData(SchemaKey(Vendor, "user", Format, SchemaVersion), - Map("cid" -> idTranslation("clientId"), "uid" -> idTranslation("userId"))), + MPData( + SchemaKey(Vendor, "user", Format, SchemaVersion), + Map("cid" -> idTranslation("clientId"), "uid" -> idTranslation("userId"))), MPData( SchemaKey(Vendor, "session", Format, SchemaVersion), Map( - "sc" -> idTranslation("sessionControl"), - "uip" -> idTranslation("ipOverride"), - "ua" -> idTranslation("userAgentOverride"), + "sc" -> idTranslation("sessionControl"), + "uip" -> idTranslation("ipOverride"), + "ua" -> idTranslation("userAgentOverride"), "geoid" -> idTranslation("geographicalOverride") ) ), MPData( SchemaKey(Vendor, "traffic_source", Format, SchemaVersion), Map( - "dr" -> idTranslation("documentReferrer"), - "cn" -> idTranslation("campaignName"), - "cs" -> idTranslation("campaignSource"), - "cm" -> idTranslation("campaignMedium"), - "ck" -> idTranslation("campaignKeyword"), - "cc" -> idTranslation("campaignContent"), - "ci" -> idTranslation("campaignId"), + "dr" -> idTranslation("documentReferrer"), + "cn" -> idTranslation("campaignName"), + "cs" -> idTranslation("campaignSource"), + "cm" -> idTranslation("campaignMedium"), + "ck" -> idTranslation("campaignKeyword"), + "cc" -> idTranslation("campaignContent"), + "ci" -> idTranslation("campaignId"), "gclid" -> idTranslation("googleAdwordsId"), "dclid" -> idTranslation("googleDisplayAdsId") ) @@ -245,27 +240,30 @@ object GoogleAnalyticsAdapter extends Adapter { MPData( SchemaKey(Vendor, "app", Format, SchemaVersion), Map( - "an" -> idTranslation("name"), - "aid" -> idTranslation("id"), - "av" -> idTranslation("version"), + "an" -> idTranslation("name"), + "aid" -> idTranslation("id"), + "av" -> idTranslation("version"), "aiid" -> idTranslation("installerId") ) ), MPData( SchemaKey(Vendor, "product_action", Format, SchemaVersion), Map( - "pa" -> idTranslation("productAction"), + "pa" -> idTranslation("productAction"), "pal" -> idTranslation("productActionList"), "cos" -> intTranslation("checkoutStep"), "col" -> idTranslation("checkoutStepOption") ) ), - MPData(SchemaKey(Vendor, "content_experiment", Format, SchemaVersion), - Map("xid" -> idTranslation("id"), "xvar" -> idTranslation("variant"))), - MPData(SchemaKey(Vendor, "hit", Format, SchemaVersion), - Map("t" -> idTranslation("type"), "ni" -> booleanTranslation("nonInteractionHit"))), - MPData(SchemaKey(Vendor, 
"promotion_action", Format, SchemaVersion), - Map("promoa" -> idTranslation("promotionAction"))) + MPData( + SchemaKey(Vendor, "content_experiment", Format, SchemaVersion), + Map("xid" -> idTranslation("id"), "xvar" -> idTranslation("variant"))), + MPData( + SchemaKey(Vendor, "hit", Format, SchemaVersion), + Map("t" -> idTranslation("type"), "ni" -> booleanTranslation("nonInteractionHit"))), + MPData( + SchemaKey(Vendor, "promotion_action", Format, SchemaVersion), + Map("promoa" -> idTranslation("promotionAction"))) ) ct.map(d => d.schemaKey -> d.translationTable).toMap } @@ -282,103 +280,104 @@ object GoogleAnalyticsAdapter extends Adapter { SchemaKey(Vendor, "product", Format, SchemaVersion), Map( s"${valueInFieldNameIndicator}pr" -> intTranslation("index"), - "prid" -> idTranslation("sku"), - "prnm" -> idTranslation("name"), - "prbr" -> idTranslation("brand"), - "prca" -> idTranslation("category"), - "prva" -> idTranslation("variant"), - "prpr" -> twoDecimalsTranslation("price"), - "prqt" -> intTranslation("quantity"), - "prcc" -> idTranslation("couponCode"), - "prps" -> intTranslation("position"), - "cu" -> idTranslation("currencyCode") + "prid" -> idTranslation("sku"), + "prnm" -> idTranslation("name"), + "prbr" -> idTranslation("brand"), + "prca" -> idTranslation("category"), + "prva" -> idTranslation("variant"), + "prpr" -> twoDecimalsTranslation("price"), + "prqt" -> intTranslation("quantity"), + "prcc" -> idTranslation("couponCode"), + "prps" -> intTranslation("position"), + "cu" -> idTranslation("currencyCode") ) ), MPData( SchemaKey(Vendor, "product_custom_dimension", Format, SchemaVersion), Map( s"${valueInFieldNameIndicator}prcd" -> intTranslation("productIndex"), - s"${valueInFieldNameIndicator}cd" -> intTranslation("dimensionIndex"), - "prcd" -> idTranslation("value") + s"${valueInFieldNameIndicator}cd" -> intTranslation("dimensionIndex"), + "prcd" -> idTranslation("value") ) ), MPData( SchemaKey(Vendor, "product_custom_metric", Format, SchemaVersion), Map( s"${valueInFieldNameIndicator}prcm" -> intTranslation("productIndex"), - s"${valueInFieldNameIndicator}cm" -> intTranslation("metricIndex"), - "prcm" -> intTranslation("value") + s"${valueInFieldNameIndicator}cm" -> intTranslation("metricIndex"), + "prcm" -> intTranslation("value") ) ), MPData( SchemaKey(Vendor, "product_impression_list", Format, SchemaVersion), Map( s"${valueInFieldNameIndicator}il" -> intTranslation("index"), - "ilnm" -> idTranslation("name") + "ilnm" -> idTranslation("name") ) ), MPData( SchemaKey(Vendor, "product_impression", Format, SchemaVersion), Map( s"${valueInFieldNameIndicator}ilpi" -> intTranslation("listIndex"), - s"${valueInFieldNameIndicator}pi" -> intTranslation("productIndex"), - "ilpiid" -> idTranslation("sku"), - "ilpinm" -> idTranslation("name"), - "ilpibr" -> idTranslation("brand"), - "ilpica" -> idTranslation("category"), - "ilpiva" -> idTranslation("variant"), - "ilpips" -> intTranslation("position"), - "ilpipr" -> twoDecimalsTranslation("price"), - "cu" -> idTranslation("currencyCode") + s"${valueInFieldNameIndicator}pi" -> intTranslation("productIndex"), + "ilpiid" -> idTranslation("sku"), + "ilpinm" -> idTranslation("name"), + "ilpibr" -> idTranslation("brand"), + "ilpica" -> idTranslation("category"), + "ilpiva" -> idTranslation("variant"), + "ilpips" -> intTranslation("position"), + "ilpipr" -> twoDecimalsTranslation("price"), + "cu" -> idTranslation("currencyCode") ) ), MPData( SchemaKey(Vendor, "product_impression_custom_dimension", Format, SchemaVersion), Map( 
s"${valueInFieldNameIndicator}ilpicd" -> intTranslation("listIndex"), - s"${valueInFieldNameIndicator}picd" -> intTranslation("productIndex"), - s"${valueInFieldNameIndicator}cd" -> intTranslation("customDimensionIndex"), - "ilpicd" -> idTranslation("value") + s"${valueInFieldNameIndicator}picd" -> intTranslation("productIndex"), + s"${valueInFieldNameIndicator}cd" -> intTranslation("customDimensionIndex"), + "ilpicd" -> idTranslation("value") ) ), MPData( SchemaKey(Vendor, "product_impression_custom_metric", Format, SchemaVersion), Map( s"${valueInFieldNameIndicator}ilpicm" -> intTranslation("listIndex"), - s"${valueInFieldNameIndicator}picm" -> intTranslation("productIndex"), - s"${valueInFieldNameIndicator}cm" -> intTranslation("customMetricIndex"), - "ilpicm" -> intTranslation("value") + s"${valueInFieldNameIndicator}picm" -> intTranslation("productIndex"), + s"${valueInFieldNameIndicator}cm" -> intTranslation("customMetricIndex"), + "ilpicm" -> intTranslation("value") ) ), MPData( SchemaKey(Vendor, "promotion", Format, SchemaVersion), Map( s"${valueInFieldNameIndicator}promo" -> intTranslation("index"), - "promoid" -> idTranslation("id"), - "promonm" -> idTranslation("name"), - "promocr" -> idTranslation("creative"), - "promops" -> idTranslation("position") + "promoid" -> idTranslation("id"), + "promonm" -> idTranslation("name"), + "promocr" -> idTranslation("creative"), + "promops" -> idTranslation("position") ) ), MPData( SchemaKey(Vendor, "custom_dimension", Format, SchemaVersion), Map( s"${valueInFieldNameIndicator}cd" -> intTranslation("index"), - "cd" -> idTranslation("value") + "cd" -> idTranslation("value") ) ), MPData( SchemaKey(Vendor, "custom_metric", Format, SchemaVersion), Map( s"${valueInFieldNameIndicator}cm" -> intTranslation("index"), - "cm" -> doubleTranslation("value") + "cm" -> doubleTranslation("value") ) ), - MPData(SchemaKey(Vendor, "content_group", Format, SchemaVersion), - Map( - s"${valueInFieldNameIndicator}cg" -> intTranslation("index"), - "cg" -> idTranslation("value") - )) + MPData( + SchemaKey(Vendor, "content_group", Format, SchemaVersion), + Map( + s"${valueInFieldNameIndicator}cg" -> intTranslation("index"), + "cg" -> idTranslation("value") + )) ) // List of schemas for which we need to re attach the currency @@ -401,25 +400,25 @@ object GoogleAnalyticsAdapter extends Adapter { private val directMappings: (String => Map[String, String]) = (hitType: String) => Map( "uip" -> "ip", - "dr" -> "refr", - "de" -> "cs", - "sd" -> "cd", - "ul" -> "lang", - "je" -> "f_java", - "dl" -> "url", - "dt" -> "page", - "ti" -> (if (hitType == "transaction") "tr_id" else "ti_id"), - "ta" -> "tr_af", - "tr" -> "tr_tt", - "ts" -> "tr_sh", - "tt" -> "tr_tx", - "in" -> "ti_nm", - "ip" -> "ti_pr", - "iq" -> "ti_qu", - "ic" -> "ti_sk", - "iv" -> "ti_ca", - "cu" -> (if (hitType == "transaction") "tr_cu" else "ti_cu"), - "ua" -> "ua" + "dr" -> "refr", + "de" -> "cs", + "sd" -> "cd", + "ul" -> "lang", + "je" -> "f_java", + "dl" -> "url", + "dt" -> "page", + "ti" -> (if (hitType == "transaction") "tr_id" else "ti_id"), + "ta" -> "tr_af", + "tr" -> "tr_tt", + "ts" -> "tr_sh", + "tt" -> "tr_tx", + "in" -> "ti_nm", + "ip" -> "ti_pr", + "iq" -> "ti_qu", + "ic" -> "ti_sk", + "iv" -> "ti_ca", + "cu" -> (if (hitType == "transaction") "tr_cu" else "ti_cu"), + "ua" -> "ua" ) /** @@ -432,11 +431,11 @@ object GoogleAnalyticsAdapter extends Adapter { */ def toRawEvents(payload: CollectorPayload)(implicit resolver: Resolver): ValidatedRawEvents = (for { - body <- payload.body + body <- 
payload.body rawEvents <- body.lines.map(parsePayload(_, payload)).toList.toNel } yield rawEvents) match { case Some(rawEvents) => rawEvents.sequenceU - case None => s"Request body is empty: no $VendorName events to process".failNel + case None => s"Request body is empty: no $VendorName events to process".failNel } /** @@ -446,9 +445,9 @@ object GoogleAnalyticsAdapter extends Adapter { * @return a Validation boxing either a RawEvent or a NEL of Failure Strings */ private def parsePayload(bodyPart: String, payload: CollectorPayload): ValidationNel[String, RawEvent] = { - val params = toMap(URLEncodedUtils.parse(URI.create(s"http://localhost/?$bodyPart"), "UTF-8").toList) + val params = toMap(URLEncodedUtils.parse(URI.create(s"http://localhost/?$bodyPart"), UTF_8).toList) params.get("t") match { - case None => s"No $VendorName t parameter provided: cannot determine hit type".failNel + case None => s"No $VendorName t parameter provided: cannot determine hit type".failNel case Some(hitType) => // direct mappings val mappings = translatePayload(params, directMappings(hitType)) @@ -456,14 +455,15 @@ object GoogleAnalyticsAdapter extends Adapter { .get(hitType) .map(_.translationTable) .toSuccess(s"No matching $VendorName hit type for hit type $hitType".wrapNel) - val schemaVal = lookupSchema(hitType.some, VendorName, unstructEventData.mapValues(_.schemaKey.toSchemaUri)) + val schemaVal = lookupSchema(hitType.some, VendorName, unstructEventData.mapValues(_.schemaKey.toSchemaUri)) val simpleContexts = buildContexts(params, contextData, fieldToSchemaMap) val compositeContexts = - buildCompositeContexts(params, - compositeContextData, - compositeContextsWithCU, - nrCompFieldsPerSchema, - valueInFieldNameIndicator).validation + buildCompositeContexts( + params, + compositeContextData, + compositeContextsWithCU, + nrCompFieldsPerSchema, + valueInFieldNameIndicator).validation .toValidationNel (translationTable |@| schemaVal |@| simpleContexts |@| compositeContexts) { @@ -488,8 +488,8 @@ object GoogleAnalyticsAdapter extends Adapter { parameters = contextParam ++ mappings ++ Map("e" -> "ue", "ue_pr" -> unstructEvent, "tv" -> Protocol, "p" -> "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context ) } }.flatMap(identity) @@ -684,7 +684,7 @@ object GoogleAnalyticsAdapter extends Adapter { private[registry] def breakDownCompField(fieldName: String): \/[String, (List[String], List[String])] = fieldName match { case compositeFieldRegex(grps @ _*) => splitEvenOdd(grps.toList.filter(_.nonEmpty)).right - case s if s.isEmpty => "Cannot parse empty composite field name".left + case s if s.isEmpty => "Cannot parse empty composite field name".left case _ => (s"Cannot parse field name $fieldName, " + s"it doesn't conform to the expected composite field regex: $compositeFieldRegex").left @@ -694,8 +694,8 @@ object GoogleAnalyticsAdapter extends Adapter { private def splitEvenOdd[T](list: List[T]): (List[T], List[T]) = { def go(l: List[T], even: List[T], odd: List[T]): (List[T], List[T], List[T]) = l match { case h1 :: h2 :: t => go(t, h1 :: even, h2 :: odd) - case h :: Nil => (Nil, h :: even, odd) - case Nil => (Nil, even, odd) + case h :: Nil => (Nil, h :: even, odd) + case Nil => (Nil, even, odd) } val res = go(list, Nil, Nil) (res._2.reverse, res._3.reverse) @@ -706,15 +706,15 @@ object GoogleAnalyticsAdapter extends Adapter { list .foldLeft(List.empty[T]) { case (h :: t, e) if e != h => e :: h :: t - case (Nil, e) => 
e :: Nil - case (l, _) => l + case (Nil, e) => e :: Nil + case (l, _) => l } .reverse /** Transposes a list of lists, does not need to be rectangular unlike the stdlib's version. */ private def transpose[T](l: List[List[T]]): List[List[T]] = l.flatMap(_.headOption) match { - case Nil => Nil + case Nil => Nil case head => head :: transpose(l.collect { case _ :: tail => tail }) } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/HubSpotAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/HubSpotAdapter.scala index 66a1cc6da..c5ade2553 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/HubSpotAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/HubSpotAdapter.scala @@ -10,35 +10,18 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Jackson -import com.fasterxml.jackson.databind.JsonNode import com.fasterxml.jackson.core.JsonParseException - -// Scalaz +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} +import org.joda.time.DateTime import scalaz._ import Scalaz._ - -// json4s import org.json4s._ -import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ - -// Iglu -import iglu.client.{Resolver, SchemaKey} -import iglu.client.validation.ValidatableJsonMethods._ - -// Joda Time -import org.joda.time.DateTime -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} @@ -60,15 +43,15 @@ object HubSpotAdapter extends Adapter { // Event-Schema Map for reverse-engineering a Snowplow unstructured event private val EventSchemaMap = Map( - "contact.creation" -> SchemaKey("com.hubspot", "contact_creation", "jsonschema", "1-0-0").toSchemaUri, - "contact.deletion" -> SchemaKey("com.hubspot", "contact_deletion", "jsonschema", "1-0-0").toSchemaUri, + "contact.creation" -> SchemaKey("com.hubspot", "contact_creation", "jsonschema", "1-0-0").toSchemaUri, + "contact.deletion" -> SchemaKey("com.hubspot", "contact_deletion", "jsonschema", "1-0-0").toSchemaUri, "contact.propertyChange" -> SchemaKey("com.hubspot", "contact_change", "jsonschema", "1-0-0").toSchemaUri, - "company.creation" -> SchemaKey("com.hubspot", "company_creation", "jsonschema", "1-0-0").toSchemaUri, - "company.deletion" -> SchemaKey("com.hubspot", "company_deletion", "jsonschema", "1-0-0").toSchemaUri, + "company.creation" -> SchemaKey("com.hubspot", "company_creation", "jsonschema", "1-0-0").toSchemaUri, + "company.deletion" -> SchemaKey("com.hubspot", "company_deletion", "jsonschema", "1-0-0").toSchemaUri, "company.propertyChange" -> SchemaKey("com.hubspot", "company_change", "jsonschema", "1-0-0").toSchemaUri, - "deal.creation" -> SchemaKey("com.hubspot", "deal_creation", "jsonschema", "1-0-0").toSchemaUri, - "deal.deletion" -> SchemaKey("com.hubspot", "deal_deletion", "jsonschema", "1-0-0").toSchemaUri, - "deal.propertyChange" -> SchemaKey("com.hubspot", "deal_change", "jsonschema", "1-0-0").toSchemaUri + "deal.creation" -> SchemaKey("com.hubspot", "deal_creation", "jsonschema", "1-0-0").toSchemaUri, + 
"deal.deletion" -> SchemaKey("com.hubspot", "deal_deletion", "jsonschema", "1-0-0").toSchemaUri, + "deal.propertyChange" -> SchemaKey("com.hubspot", "deal_change", "jsonschema", "1-0-0").toSchemaUri ) /** @@ -109,13 +92,13 @@ object HubSpotAdapter extends Adapter { } yield { val formattedEvent = reformatParameters(event) - val qsParams = toMap(payload.querystring) + val qsParams = toMap(payload.querystring) RawEvent( - api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, qsParams, schema, formattedEvent, "srv"), + api = payload.api, + parameters = toUnstructEventParams(TrackerVersion, qsParams, schema, formattedEvent, "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context ) } } @@ -141,7 +124,7 @@ object HubSpotAdapter extends Adapter { val parsed = parse(body) parsed match { case JArray(list) => list.success - case _ => s"Could not resolve ${VendorName} payload into a JSON array of events".fail + case _ => s"Could not resolve ${VendorName} payload into a JSON array of events".fail } } catch { case e: JsonParseException => { @@ -168,7 +151,7 @@ object HubSpotAdapter extends Adapter { json removeField { case ("subscriptionType", JString(s)) => true - case _ => false + case _ => false } transformField { case ("occurredAt", JInt(value)) => ("occurredAt", toStringField(value.toLong)) } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/IgluAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/IgluAdapter.scala index 6cc15924f..b2a9e55c1 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/IgluAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/IgluAdapter.scala @@ -10,42 +10,32 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Java import java.net.URI -import org.apache.http.client.utils.URLEncodedUtils - -// Iglu -import iglu.client.{Resolver, SchemaKey} +import java.nio.charset.StandardCharsets.UTF_8 -// Scala import scala.collection.JavaConversions._ -// Scalaz +import com.fasterxml.jackson.core.JsonParseException +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} +import org.apache.http.client.utils.URLEncodedUtils import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -import com.fasterxml.jackson.core.JsonParseException -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} /** * Transforms a collector payload which either: * 1. Provides a set of name-value pairs on a GET querystring - * with a &schema=[[iglu schema uri]] parameter. - * 2. Provides a &schema=[[iglu schema uri]] parameter on a POST + * with a &schema={iglu schema uri} parameter. + * 2. Provides a &schema={iglu schema uri} parameter on a POST * querystring and a set of name-value pairs in the body. 
* - Formatted as JSON * - Formatted as a Form Body @@ -78,11 +68,11 @@ object IgluAdapter extends Adapter { val params = toMap(payload.querystring) (params.get("schema"), payload.body, payload.contentType) match { - case (_, Some(body), None) => s"$VendorName event failed: ContentType must be set for a POST payload".failNel - case (None, Some(body), Some(contentType)) => payloadSdJsonToEvent(payload, body, contentType, params) + case (_, Some(body), None) => s"$VendorName event failed: ContentType must be set for a POST payload".failNel + case (None, Some(body), Some(contentType)) => payloadSdJsonToEvent(payload, body, contentType, params) case (Some(schemaUri), Some(body), Some(contentType)) => payloadToEventWithSchema(payload, schemaUri, params) - case (Some(schemaUri), None, _) => payloadToEventWithSchema(payload, schemaUri, params) - case (_, _, _) => s"$VendorName event failed: is not a sd-json or a valid GET or POST request".failNel + case (Some(schemaUri), None, _) => payloadToEventWithSchema(payload, schemaUri, params) + case (_, _, _) => s"$VendorName event failed: is not a sd-json or a valid GET or POST request".failNel } } @@ -98,14 +88,15 @@ object IgluAdapter extends Adapter { * @param contentType The extracted contentType string * @param params The raw map of params from the querystring. */ - private[registry] def payloadSdJsonToEvent(payload: CollectorPayload, - body: String, - contentType: String, - params: Map[String, String]): ValidatedRawEvents = + private[registry] def payloadSdJsonToEvent( + payload: CollectorPayload, + body: String, + contentType: String, + params: Map[String, String]): ValidatedRawEvents = contentType match { - case "application/json" => sdJsonBodyToEvent(payload, body, params) + case "application/json" => sdJsonBodyToEvent(payload, body, params) case "application/json; charset=utf-8" => sdJsonBodyToEvent(payload, body, params) - case _ => "Content type not supported".failNel + case _ => "Content type not supported".failNel } /** @@ -117,9 +108,10 @@ object IgluAdapter extends Adapter { * @param body The extracted body string * @param params The raw map of params from the querystring. */ - private[registry] def sdJsonBodyToEvent(payload: CollectorPayload, - body: String, - params: Map[String, String]): ValidatedRawEvents = { + private[registry] def sdJsonBodyToEvent( + payload: CollectorPayload, + body: String, + params: Map[String, String]): ValidatedRawEvents = { implicit val formats = org.json4s.DefaultFormats @@ -132,11 +124,11 @@ object IgluAdapter extends Adapter { case Success(_) => { NonEmptyList( RawEvent( - api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, params, schemaUri, data, "app"), + api = payload.api, + parameters = toUnstructEventParams(TrackerVersion, params, schemaUri, data, "app"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context )).success } } @@ -160,9 +152,10 @@ object IgluAdapter extends Adapter { * @param schemaUri The schema-uri found * @param params The raw map of params from the querystring. 
*/ - private[registry] def payloadToEventWithSchema(payload: CollectorPayload, - schemaUri: String, - params: Map[String, String]): ValidatedRawEvents = + private[registry] def payloadToEventWithSchema( + payload: CollectorPayload, + schemaUri: String, + params: Map[String, String]): ValidatedRawEvents = SchemaKey.parse(schemaUri) match { case Failure(procMsg) => procMsg.getMessage.failNel case Success(_) => @@ -170,19 +163,19 @@ object IgluAdapter extends Adapter { case (None, _) => { NonEmptyList( RawEvent( - api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, (params - "schema"), schemaUri, IgluFormatter, "app"), + api = payload.api, + parameters = toUnstructEventParams(TrackerVersion, (params - "schema"), schemaUri, IgluFormatter, "app"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context )).success } case (Some(body), Some(contentType)) => { contentType match { - case "application/json" => jsonBodyToEvent(payload, body, schemaUri, params) - case "application/json; charset=utf-8" => jsonBodyToEvent(payload, body, schemaUri, params) + case "application/json" => jsonBodyToEvent(payload, body, schemaUri, params) + case "application/json; charset=utf-8" => jsonBodyToEvent(payload, body, schemaUri, params) case "application/x-www-form-urlencoded" => formBodyToEvent(payload, body, schemaUri, params) - case _ => "Content type not supported".failNel + case _ => "Content type not supported".failNel } } case (_, None) => "Content type has not been specified".failNel @@ -198,17 +191,18 @@ object IgluAdapter extends Adapter { * @param params The query string parameters * @return a single validated event */ - private[registry] def jsonBodyToEvent(payload: CollectorPayload, - body: String, - schemaUri: String, - params: Map[String, String]): ValidatedRawEvents = { + private[registry] def jsonBodyToEvent( + payload: CollectorPayload, + body: String, + schemaUri: String, + params: Map[String, String]): ValidatedRawEvents = { def buildRawEvent(e: JValue): RawEvent = RawEvent( - api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, (params - "schema"), schemaUri, e, "app"), + api = payload.api, + parameters = toUnstructEventParams(TrackerVersion, (params - "schema"), schemaUri, e, "app"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context ) parseJsonSafe(body) match { @@ -217,7 +211,7 @@ object IgluAdapter extends Adapter { case a: JArray => a.arr match { case h :: t => (NonEmptyList(buildRawEvent(h)) :::> t.map(buildRawEvent)).success - case Nil => s"$VendorName event failed json sanity check: array of events cannot be empty".failNel + case Nil => s"$VendorName event failed json sanity check: array of events cannot be empty".failNel } case _ => if (parsed.children.isEmpty) { @@ -240,22 +234,23 @@ object IgluAdapter extends Adapter { * @param params The query string parameters * @return a single validated event */ - private[registry] def formBodyToEvent(payload: CollectorPayload, - body: String, - schemaUri: String, - params: Map[String, String]): ValidatedRawEvents = + private[registry] def formBodyToEvent( + payload: CollectorPayload, + body: String, + schemaUri: String, + params: Map[String, String]): ValidatedRawEvents = try { - val bodyMap = toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" 
+ body), "UTF-8").toList) - val json = compact(render(bodyMap)) - val event = parse(json) + val bodyMap = toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" + body), UTF_8).toList) + val json = compact(render(bodyMap)) + val event = parse(json) NonEmptyList( RawEvent( - api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, (params - "schema"), schemaUri, event, "srv"), + api = payload.api, + parameters = toUnstructEventParams(TrackerVersion, (params - "schema"), schemaUri, event, "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context )).success } catch { case e: JsonParseException => { diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MailchimpAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MailchimpAdapter.scala index 9258018d6..7df410382 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MailchimpAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MailchimpAdapter.scala @@ -10,42 +10,24 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Java import java.net.URI -import org.apache.http.client.utils.URLEncodedUtils - -// Joda-Time -import org.joda.time.{DateTime, DateTimeZone} -import org.joda.time.format.{DateTimeFormat, DateTimeFormatter} - -// Jackson -import com.fasterxml.jackson.databind.JsonNode +import java.nio.charset.StandardCharsets.UTF_8 -// Scala import scala.collection.JavaConversions._ -// Scalaz +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} +import org.apache.http.client.utils.URLEncodedUtils +import org.joda.time.DateTimeZone +import org.joda.time.format.DateTimeFormat import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.JsonDSL._ -import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ - -// Iglu -import iglu.client.{Resolver, SchemaKey} -import iglu.client.validation.ValidatableJsonMethods._ -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} @@ -67,12 +49,12 @@ object MailchimpAdapter extends Adapter { // Schemas for reverse-engineering a Snowplow unstructured event private val EventSchemaMap = Map( - "subscribe" -> SchemaKey("com.mailchimp", "subscribe", "jsonschema", "1-0-0").toSchemaUri, + "subscribe" -> SchemaKey("com.mailchimp", "subscribe", "jsonschema", "1-0-0").toSchemaUri, "unsubscribe" -> SchemaKey("com.mailchimp", "unsubscribe", "jsonschema", "1-0-0").toSchemaUri, - "campaign" -> SchemaKey("com.mailchimp", "campaign_sending_status", "jsonschema", "1-0-0").toSchemaUri, - "cleaned" -> SchemaKey("com.mailchimp", "cleaned_email", "jsonschema", "1-0-0").toSchemaUri, - "upemail" -> SchemaKey("com.mailchimp", "email_address_change", "jsonschema", "1-0-0").toSchemaUri, - "profile" -> SchemaKey("com.mailchimp", "profile_update", "jsonschema", "1-0-0").toSchemaUri + "campaign" -> SchemaKey("com.mailchimp", "campaign_sending_status", "jsonschema", "1-0-0").toSchemaUri, + "cleaned" -> 
SchemaKey("com.mailchimp", "cleaned_email", "jsonschema", "1-0-0").toSchemaUri, + "upemail" -> SchemaKey("com.mailchimp", "email_address_change", "jsonschema", "1-0-0").toSchemaUri, + "profile" -> SchemaKey("com.mailchimp", "profile_update", "jsonschema", "1-0-0").toSchemaUri ) // Datetime format used by MailChimp (as we will need to massage) @@ -105,7 +87,7 @@ object MailchimpAdapter extends Adapter { s"Content type of ${ct} provided, expected ${ContentType} for ${VendorName}".failNel case (Some(body), _) => { - val params = toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" + body), "UTF-8").toList) + val params = toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" + body), UTF_8).toList) params.get("type") match { case None => s"No ${VendorName} type parameter provided: cannot determine event type".failNel case Some(eventType) => { @@ -116,11 +98,11 @@ object MailchimpAdapter extends Adapter { } yield { NonEmptyList( RawEvent( - api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, allParams, schema, MailchimpFormatter, "srv"), + api = payload.api, + parameters = toUnstructEventParams(TrackerVersion, allParams, schema, MailchimpFormatter, "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context )) } } @@ -168,8 +150,10 @@ object MailchimpAdapter extends Adapter { */ private[registry] def toNestedJField(keys: NonEmptyList[String], value: String): JField = keys.toList match { - case head :: second :: tail => JField(head, toNestedJField(NonEmptyList(second, tail: _*), value)) - case head :: Nil => JField(head, JString(value)) + case h1 :: h2 :: t => JField(h1, toNestedJField(NonEmptyList(h2, t: _*), value)) + case h :: Nil => JField(h, JString(value)) + // unreachable but can't pattern match on NEL + case _ => JField("", JString(value)) } /** @@ -189,7 +173,7 @@ object MailchimpAdapter extends Adapter { private[registry] def mergeJFields(jfields: List[JField]): JObject = jfields match { case x :: xs => xs.foldLeft(JObject(x))(_ merge JObject(_)) - case Nil => JObject(Nil) + case Nil => JObject(Nil) } /** @@ -204,6 +188,6 @@ object MailchimpAdapter extends Adapter { private[registry] def reformatParameters(parameters: RawEventParameters): RawEventParameters = parameters.get("fired_at") match { case Some(firedAt) => parameters.updated("fired_at", JU.toJsonSchemaDateTime(firedAt, MailchimpDateTimeFormat)) - case None => parameters + case None => parameters } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MailgunAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MailgunAdapter.scala index 18c7fc536..e52946037 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MailgunAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MailgunAdapter.scala @@ -10,38 +10,26 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Java import java.net.URI -import org.apache.http.client.utils.URLEncodedUtils +import java.nio.charset.StandardCharsets.UTF_8 -// Scala import scala.collection.JavaConversions._ import scala.util.control.NonFatal import scala.util.{Try, Success => TS, Failure => TF} -// Scalaz +import com.fasterxml.jackson.core.JsonParseException +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} +import org.apache.http.client.utils.URLEncodedUtils import scalaz._ import Scalaz._ - -// Jackson -import com.fasterxml.jackson.core.JsonParseException - -// json4s import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -// Iglu -import iglu.client.{Resolver, SchemaKey} - -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} @@ -59,17 +47,17 @@ object MailgunAdapter extends Adapter { private val TrackerVersion = "com.mailgun-v1" // Expected content type for a request body - private val ContentTypes = List("application/x-www-form-urlencoded", "multipart/form-data") + private val ContentTypes = List("application/x-www-form-urlencoded", "multipart/form-data") private val ContentTypesStr = ContentTypes.mkString(" or ") // Schemas for reverse-engineering a Snowplow unstructured event private val EventSchemaMap = Map( - "bounced" -> SchemaKey("com.mailgun", "message_bounced", "jsonschema", "1-0-0").toSchemaUri, - "clicked" -> SchemaKey("com.mailgun", "message_clicked", "jsonschema", "1-0-0").toSchemaUri, - "complained" -> SchemaKey("com.mailgun", "message_complained", "jsonschema", "1-0-0").toSchemaUri, - "delivered" -> SchemaKey("com.mailgun", "message_delivered", "jsonschema", "1-0-0").toSchemaUri, - "dropped" -> SchemaKey("com.mailgun", "message_dropped", "jsonschema", "1-0-0").toSchemaUri, - "opened" -> SchemaKey("com.mailgun", "message_opened", "jsonschema", "1-0-0").toSchemaUri, + "bounced" -> SchemaKey("com.mailgun", "message_bounced", "jsonschema", "1-0-0").toSchemaUri, + "clicked" -> SchemaKey("com.mailgun", "message_clicked", "jsonschema", "1-0-0").toSchemaUri, + "complained" -> SchemaKey("com.mailgun", "message_complained", "jsonschema", "1-0-0").toSchemaUri, + "delivered" -> SchemaKey("com.mailgun", "message_delivered", "jsonschema", "1-0-0").toSchemaUri, + "dropped" -> SchemaKey("com.mailgun", "message_dropped", "jsonschema", "1-0-0").toSchemaUri, + "opened" -> SchemaKey("com.mailgun", "message_opened", "jsonschema", "1-0-0").toSchemaUri, "unsubscribed" -> SchemaKey("com.mailgun", "recipient_unsubscribed", "jsonschema", "1-0-0").toSchemaUri ) @@ -100,7 +88,7 @@ object MailgunAdapter extends Adapter { Try { getBoundary(ct) .map(parseMultipartForm(body, _)) - .getOrElse(toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" + body), "UTF-8").toList)) + .getOrElse(toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" 
+ body), UTF_8).toList)) } match { case TF(e) => s"${VendorName}Adapter could not parse body: [${JU.stripInstanceEtc(e.getMessage).orNull}]".failureNel @@ -111,21 +99,21 @@ object MailgunAdapter extends Adapter { case eventType => for { schemaUri <- lookupSchema(eventType.some, VendorName, EventSchemaMap) - event <- payloadBodyToEvent(bodyMap) - mEvent <- mutateMailgunEvent(event) + event <- payloadBodyToEvent(bodyMap) + mEvent <- mutateMailgunEvent(event) } yield NonEmptyList( RawEvent( api = payload.api, - parameters = - toUnstructEventParams(TrackerVersion, - params, - schemaUri, - cleanupJsonEventValues(mEvent, ("event", eventType).some, "timestamp"), - "srv"), + parameters = toUnstructEventParams( + TrackerVersion, + params, + schemaUri, + cleanupJsonEventValues(mEvent, ("event", eventType).some, "timestamp"), + "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context )) } .getOrElse(s"No ${VendorName} event parameter provided: cannot determine event type".failureNel) @@ -167,7 +155,7 @@ object MailgunAdapter extends Adapter { */ private def getBoundary(contentType: String): Option[String] = contentType match { case boundaryRegex(boundaryString) => Some(boundaryString) - case _ => None + case _ => None } /** @@ -190,7 +178,7 @@ object MailgunAdapter extends Adapter { .split(s"--$boundary") .flatMap({ case formDataRegex(k, v) => Some((k, v)) - case _ => None + case _ => None }) .toMap @@ -205,11 +193,11 @@ object MailgunAdapter extends Adapter { case (_, _, None) => s"${VendorName} event data missing 'signature'".failureNel case (Some(timestamp), Some(token), Some(signature)) => { try { - val json = compact(render(bodyMap)) + val json = compact(render(bodyMap)) val event = parse(json) event match { case obj: JObject => obj.success - case _ => s"${VendorName} event wrong type: [%s]".format(event.getClass).failureNel + case _ => s"${VendorName} event wrong type: [%s]".format(event.getClass).failureNel } } catch { case e: JsonParseException => diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MandrillAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MandrillAdapter.scala index aed0bc4d4..218b5ca98 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MandrillAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MandrillAdapter.scala @@ -10,41 +10,23 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
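The change that repeats through these adapter hunks is replacing the deprecated URLEncodedUtils.parse(uri, "UTF-8") overload with the Charset-based one. A small sketch of the toMap-style body parsing the adapters rely on (the real helper lives in the shared Adapter trait; JavaConverters is used here instead of the project's JavaConversions):

import java.net.URI
import java.nio.charset.StandardCharsets.UTF_8
import scala.collection.JavaConverters._
import org.apache.http.client.utils.URLEncodedUtils

// Parse an application/x-www-form-urlencoded body into a Map, as the adapters do
def formBodyToMap(body: String): Map[String, String] =
  URLEncodedUtils
    .parse(URI.create("http://localhost/?" + body), UTF_8)
    .asScala
    .map(nvp => nvp.getName -> nvp.getValue)
    .toMap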
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Java import java.net.URI -import org.apache.http.client.utils.URLEncodedUtils - -// Joda-Time -import org.joda.time.{DateTime, DateTimeZone} -import org.joda.time.format.{DateTimeFormat, DateTimeFormatter} - -// Jackson -import com.fasterxml.jackson.core.JsonParseException +import java.nio.charset.StandardCharsets.UTF_8 -// Scala import scala.collection.JavaConversions._ -// Scalaz +import com.fasterxml.jackson.core.JsonParseException +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} +import org.apache.http.client.utils.URLEncodedUtils import scalaz._ import Scalaz._ - -// json4s import org.json4s._ -import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ - -// Iglu -import iglu.client.{Resolver, SchemaKey} -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} @@ -67,14 +49,14 @@ object MandrillAdapter extends Adapter { // Schemas for reverse-engineering a Snowplow unstructured event private val EventSchemaMap = Map( "hard_bounce" -> SchemaKey("com.mandrill", "message_bounced", "jsonschema", "1-0-1").toSchemaUri, - "click" -> SchemaKey("com.mandrill", "message_clicked", "jsonschema", "1-0-1").toSchemaUri, - "deferral" -> SchemaKey("com.mandrill", "message_delayed", "jsonschema", "1-0-1").toSchemaUri, - "spam" -> SchemaKey("com.mandrill", "message_marked_as_spam", "jsonschema", "1-0-1").toSchemaUri, - "open" -> SchemaKey("com.mandrill", "message_opened", "jsonschema", "1-0-1").toSchemaUri, - "reject" -> SchemaKey("com.mandrill", "message_rejected", "jsonschema", "1-0-0").toSchemaUri, - "send" -> SchemaKey("com.mandrill", "message_sent", "jsonschema", "1-0-0").toSchemaUri, + "click" -> SchemaKey("com.mandrill", "message_clicked", "jsonschema", "1-0-1").toSchemaUri, + "deferral" -> SchemaKey("com.mandrill", "message_delayed", "jsonschema", "1-0-1").toSchemaUri, + "spam" -> SchemaKey("com.mandrill", "message_marked_as_spam", "jsonschema", "1-0-1").toSchemaUri, + "open" -> SchemaKey("com.mandrill", "message_opened", "jsonschema", "1-0-1").toSchemaUri, + "reject" -> SchemaKey("com.mandrill", "message_rejected", "jsonschema", "1-0-0").toSchemaUri, + "send" -> SchemaKey("com.mandrill", "message_sent", "jsonschema", "1-0-0").toSchemaUri, "soft_bounce" -> SchemaKey("com.mandrill", "message_soft_bounced", "jsonschema", "1-0-1").toSchemaUri, - "unsub" -> SchemaKey("com.mandrill", "recipient_unsubscribed", "jsonschema", "1-0-1").toSchemaUri + "unsub" -> SchemaKey("com.mandrill", "recipient_unsubscribed", "jsonschema", "1-0-1").toSchemaUri ) /** @@ -120,15 +102,15 @@ object MandrillAdapter extends Adapter { val formattedEvent = cleanupJsonEventValues(event, eventOpt match { case Some(x) => ("event", x).some - case None => None + case None => None }, "ts") val qsParams = toMap(payload.querystring) RawEvent( - api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, qsParams, schema, formattedEvent, "srv"), + api = payload.api, + parameters = toUnstructEventParams(TrackerVersion, qsParams, schema, formattedEvent, "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context ) } } @@ -157,20 +139,20 @@ object MandrillAdapter extends Adapter { */ private[registry] def payloadBodyToEvents(rawEventString: String): Validation[String, 
List[JValue]] = { - val bodyMap = toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" + rawEventString), "UTF-8").toList) + val bodyMap = toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" + rawEventString), UTF_8).toList) bodyMap match { case map if map.size != 1 => s"Mapped ${VendorName} body has invalid count of keys: ${map.size}".fail case map => { map.get("mandrill_events") match { - case None => s"Mapped ${VendorName} body does not have 'mandrill_events' as a key".fail + case None => s"Mapped ${VendorName} body does not have 'mandrill_events' as a key".fail case Some("") => s"${VendorName} events string is empty: nothing to process".fail case Some(dStr) => { try { val parsed = parse(dStr) parsed match { case JArray(list) => list.success - case _ => s"Could not resolve ${VendorName} payload into a JSON array of events".fail + case _ => s"Could not resolve ${VendorName} payload into a JSON array of events".fail } } catch { case e: JsonParseException => { diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MarketoAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MarketoAdapter.scala index 2fc4287c3..f64c6c58c 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MarketoAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/MarketoAdapter.scala @@ -10,32 +10,22 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Scalaz -import scalaz.Scalaz._ - -// json4s -import org.json4s._ -import org.json4s.jackson.JsonMethods._ +import scala.util.{Failure, Success, Try} -// Iglu import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} - -// Joda Time import org.joda.time.DateTimeZone import org.joda.time.format.DateTimeFormat +import scalaz._ +import Scalaz._ +import org.json4s._ +import org.json4s.jackson.JsonMethods._ -// This project -import com.snowplowanalytics.snowplow.enrich.common.loaders.CollectorPayload -import com.snowplowanalytics.snowplow.enrich.common.utils.{JsonUtils => JU} - -import scala.util.{Failure, Success, Try} +import loaders.CollectorPayload +import utils.{JsonUtils => JU} /** * Transforms a collector payload which conforms to @@ -100,11 +90,12 @@ object MarketoAdapter extends Adapter { eventType = Some("event") schema <- lookupSchema(eventType, VendorName, EventSchemaMap) params = toUnstructEventParams(TrackerVersion, toMap(payload.querystring), schema, parsedConverted, "srv") - rawEvent = RawEvent(api = payload.api, - parameters = params, - contentType = payload.contentType, - source = payload.source, - context = payload.context) + rawEvent = RawEvent( + api = payload.api, + parameters = params, + contentType = payload.contentType, + source = payload.source, + context = payload.context) } yield rawEvent /** diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/OlarkAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/OlarkAdapter.scala index 789813e96..0619f261d 100644 --- 
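For Mandrill above, the whole request body is a single mandrill_events form field whose value must parse to a JSON array. A rough sketch of that check, using Either in place of scalaz Validation:

import scala.util.{Failure, Success, Try}
import org.json4s._
import org.json4s.jackson.JsonMethods.parse

def mandrillEvents(bodyMap: Map[String, String]): Either[String, List[JValue]] =
  bodyMap.get("mandrill_events") match {
    case None      => Left("body does not have 'mandrill_events' as a key")
    case Some("")  => Left("events string is empty: nothing to process")
    case Some(str) =>
      Try(parse(str)) match {
        case Success(JArray(list)) => Right(list)
        case Success(_)            => Left("payload is not a JSON array of events")
        case Failure(e)            => Left(s"could not parse JSON: ${e.getMessage}")
      }
  }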
a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/OlarkAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/OlarkAdapter.scala @@ -10,40 +10,26 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Java import java.net.URI +import java.nio.charset.StandardCharsets.UTF_8 -import org.apache.http.client.utils.URLEncodedUtils -import org.joda.time.DateTime - -// Scala -import scala.util.matching.Regex import scala.util.control.NonFatal import scala.collection.JavaConversions._ import scala.util.{Try, Success => TS, Failure => TF} -// Scalaz +import com.fasterxml.jackson.core.JsonParseException +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} +import org.apache.http.client.utils.URLEncodedUtils +import org.joda.time.DateTime import scalaz._ import Scalaz._ - -// Jackson -import com.fasterxml.jackson.core.JsonParseException - -// json4s import org.json4s._ import org.json4s.jackson.JsonMethods._ -// Iglu -import iglu.client.{Resolver, SchemaKey} - -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} @@ -65,7 +51,7 @@ object OlarkAdapter extends Adapter { // Schemas for reverse-engineering a Snowplow unstructured event private val EventSchemaMap = Map( - "transcript" -> SchemaKey("com.olark", "transcript", "jsonschema", "1-0-0").toSchemaUri, + "transcript" -> SchemaKey("com.olark", "transcript", "jsonschema", "1-0-0").toSchemaUri, "offline_message" -> SchemaKey("com.olark", "offline_message", "jsonschema", "1-0-0").toSchemaUri ) @@ -93,14 +79,14 @@ object OlarkAdapter extends Adapter { case (Some(body), _) if (body.isEmpty) => s"${VendorName} event body is empty: nothing to process".failureNel case (Some(body), _) => { val qsParams = toMap(payload.querystring) - Try { toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" + body), "UTF-8").toList) } match { + Try { toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" 
+ body), UTF_8).toList) } match { case TF(e) => s"${VendorName} could not parse body: [${JU.stripInstanceEtc(e.getMessage).orNull}]".failureNel case TS(bodyMap) => payloadBodyToEvent(bodyMap).flatMap { case event => { val eventType = (event \ "operators") match { case (JNothing) => Some("offline_message") - case (_) => Some("transcript") + case (_) => Some("transcript") } lookupSchema(eventType, VendorName, EventSchemaMap).flatMap { case schema => @@ -109,14 +95,15 @@ object OlarkAdapter extends Adapter { NonEmptyList( RawEvent( api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, - qsParams, - schema, - camelize(transformedEvent), - "srv"), + parameters = toUnstructEventParams( + TrackerVersion, + qsParams, + schema, + camelize(transformedEvent), + "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context )).success } } @@ -135,7 +122,7 @@ object OlarkAdapter extends Adapter { private def transformTimestamps(json: JValue): Validated[JValue] = { def toMsec(oTs: String): Long = (oTs.split('.') match { - case Array(sec) => s"${sec}000" + case Array(sec) => s"${sec}000" case Array(sec, msec) => s"${sec}${msec.take(3).padTo(3, '0')}" }).toLong @@ -163,14 +150,14 @@ object OlarkAdapter extends Adapter { */ private def payloadBodyToEvent(bodyMap: Map[String, String]): Validated[JObject] = bodyMap.get("data") match { - case None => s"${VendorName} event data does not have 'data' as a key".failureNel + case None => s"${VendorName} event data does not have 'data' as a key".failureNel case Some("") => s"${VendorName} event data is empty: nothing to process".failureNel case Some(json) => { try { val event = parse(json) event match { case obj: JObject => obj.successNel - case _ => s"${VendorName} event wrong type: [%s]".format(event.getClass).failureNel + case _ => s"${VendorName} event wrong type: [%s]".format(event.getClass).failureNel } } catch { case e: JsonParseException => diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/PagerdutyAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/PagerdutyAdapter.scala index 8473abde3..98141e4c1 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/PagerdutyAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/PagerdutyAdapter.scala @@ -10,35 +10,17 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
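The Olark transformTimestamps hunk above converts Olark's seconds[.fraction] timestamp strings into epoch milliseconds, padding a truncated fractional part. A standalone version of that conversion; the catch-all case is an addition for safety, the adapter's match has only the two cases shown:

// "1307116657.1" -> 1307116657100L ; "1307116657" -> 1307116657000L
def toMsec(olarkTs: String): Long =
  (olarkTs.split('.') match {
    case Array(sec)       => s"${sec}000"
    case Array(sec, msec) => s"$sec${msec.take(3).padTo(3, '0')}"
    case _                => throw new IllegalArgumentException(s"unexpected timestamp: $olarkTs")
  }).toLong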
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Jackson -import com.fasterxml.jackson.databind.JsonNode +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} import com.fasterxml.jackson.core.JsonParseException - -// Scala -import scala.collection.JavaConversions._ - -// Scalaz import scalaz._ import Scalaz._ - -// json4s import org.json4s._ -import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ - -// Iglu -import iglu.client.{Resolver, SchemaKey} -import iglu.client.validation.ValidatableJsonMethods._ -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} @@ -61,13 +43,13 @@ object PagerdutyAdapter extends Adapter { // Event-Schema Map for reverse-engineering a Snowplow unstructured event private val Incident = SchemaKey("com.pagerduty", "incident", "jsonschema", "1-0-0").toSchemaUri private val EventSchemaMap = Map( - "incident.trigger" -> Incident, - "incident.acknowledge" -> Incident, + "incident.trigger" -> Incident, + "incident.acknowledge" -> Incident, "incident.unacknowledge" -> Incident, - "incident.resolve" -> Incident, - "incident.assign" -> Incident, - "incident.escalate" -> Incident, - "incident.delegate" -> Incident + "incident.resolve" -> Incident, + "incident.assign" -> Incident, + "incident.escalate" -> Incident, + "incident.delegate" -> Incident ) /** @@ -108,13 +90,13 @@ object PagerdutyAdapter extends Adapter { } yield { val formattedEvent = reformatParameters(event) - val qsParams = toMap(payload.querystring) + val qsParams = toMap(payload.querystring) RawEvent( - api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, qsParams, schema, formattedEvent, "srv"), + api = payload.api, + parameters = toUnstructEventParams(TrackerVersion, qsParams, schema, formattedEvent, "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context ) } } @@ -140,8 +122,8 @@ object PagerdutyAdapter extends Adapter { val parsed = parse(body) (parsed \ "messages") match { case JArray(list) => list.success - case JNothing => s"${VendorName} payload does not contain the needed 'messages' key".fail - case _ => s"Could not resolve ${VendorName} payload into a JSON array of events".fail + case JNothing => s"${VendorName} payload does not contain the needed 'messages' key".fail + case _ => s"Could not resolve ${VendorName} payload into a JSON array of events".fail } } catch { case e: JsonParseException => { @@ -191,7 +173,7 @@ object PagerdutyAdapter extends Adapter { case (key, JString("null")) => (key, JNull) case ("type", JString(value)) if value.startsWith("incident.") => ("type", JString(value.replace("incident.", ""))) - case ("created_on", JString(value)) => ("created_on", JString(formatDatetime(value))) + case ("created_on", JString(value)) => ("created_on", JString(formatDatetime(value))) case ("last_status_change_on", JString(value)) => ("last_status_change_on", JString(formatDatetime(value))) } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/PingdomAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/PingdomAdapter.scala index 9f9146f84..b07906f25 100644 --- 
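The PagerDuty cleanup above rewrites literal "null" strings to JNull, strips the incident. prefix from the type field and reformats the two date fields. A json4s sketch of the same transformField pattern; formatDatetime is left as a stand-in for the adapter's date handling:

import org.json4s._

def formatDatetime(s: String): String = s // placeholder for the adapter's datetime reformatting

def cleanupPagerduty(json: JValue): JValue = json transformField {
  case (key, JString("null"))                            => (key, JNull)
  case ("type", JString(v)) if v.startsWith("incident.") => ("type", JString(v.stripPrefix("incident.")))
  case ("created_on", JString(v))                        => ("created_on", JString(formatDatetime(v)))
  case ("last_status_change_on", JString(v))             => ("last_status_change_on", JString(formatDatetime(v)))
}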
a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/PingdomAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/PingdomAdapter.scala @@ -10,34 +10,19 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Java import org.apache.http.NameValuePair -// Scala -import scala.util.matching.Regex - -// Scalaz +import com.fasterxml.jackson.core.JsonParseException +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} import scalaz._ import Scalaz._ - -// Jackson -import com.fasterxml.jackson.core.JsonParseException - -// json4s import org.json4s._ import org.json4s.jackson.JsonMethods._ -// Iglu -import iglu.client.{Resolver, SchemaKey} - -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} @@ -60,8 +45,8 @@ object PingdomAdapter extends Adapter { // Schemas for reverse-engineering a Snowplow unstructured event private val EventSchemaMap = Map( - "assign" -> SchemaKey("com.pingdom", "incident_assign", "jsonschema", "1-0-0").toSchemaUri, - "notify_user" -> SchemaKey("com.pingdom", "incident_notify_user", "jsonschema", "1-0-0").toSchemaUri, + "assign" -> SchemaKey("com.pingdom", "incident_assign", "jsonschema", "1-0-0").toSchemaUri, + "notify_user" -> SchemaKey("com.pingdom", "incident_notify_user", "jsonschema", "1-0-0").toSchemaUri, "notify_of_close" -> SchemaKey("com.pingdom", "incident_notify_of_close", "jsonschema", "1-0-0").toSchemaUri ) @@ -100,14 +85,14 @@ object PingdomAdapter extends Adapter { } } yield { val formattedEvent = reformatParameters(parsedEvent) - val qsParams = s - "message" + val qsParams = s - "message" NonEmptyList( RawEvent( - api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, qsParams, schema, formattedEvent, "srv"), + api = payload.api, + parameters = toUnstructEventParams(TrackerVersion, qsParams, schema, formattedEvent, "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context )) } } @@ -155,8 +140,8 @@ object PingdomAdapter extends Adapter { } yield f (successes, failures) match { - case (s :: ss, Nil) => (s :: ss).toMap.successNel // No Failures collected. - case (_, f :: fs) => NonEmptyList(f, fs: _*).fail // Some Failures, return only those. + case (s :: ss, Nil) => (s :: ss).toMap.successNel // No Failures collected. + case (_, f :: fs) => NonEmptyList(f, fs: _*).fail // Some Failures, return only those. 
case (Nil, Nil) => "Empty parameters list was passed - should never happen: empty querystring is not being caught".failNel } @@ -192,6 +177,6 @@ object PingdomAdapter extends Adapter { private[registry] def reformatParameters(json: JValue): JValue = (json \ "action").extractOpt[String] match { case Some(eventType) => json removeField { _ == JField("action", JString(eventType)) } - case None => json + case None => json } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/SendgridAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/SendgridAdapter.scala index 98929326d..e343718fb 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/SendgridAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/SendgridAdapter.scala @@ -10,36 +10,23 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Scalaz -import com.fasterxml.jackson.core.JsonParseException -import com.snowplowanalytics.snowplow.enrich.common.adapters.registry.SendgridAdapter._ -import org.joda.time.{DateTime, DateTimeZone} -import org.joda.time.format.DateTimeFormat +import javax.mail.internet.ContentType -import scalaz.Scalaz._ -import scalaz._ +import scala.util.Try -// json4s +import com.fasterxml.jackson.core.JsonParseException +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} +import scalaz._ +import Scalaz._ import org.json4s._ import org.json4s.jackson.JsonMethods._ -// Iglu -import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} - -// This project -import com.snowplowanalytics.snowplow.enrich.common.loaders.CollectorPayload -import com.snowplowanalytics.snowplow.enrich.common.utils.{JsonUtils => JU} - -import javax.mail.internet.ContentType - -import scala.util.Try +import loaders.CollectorPayload +import utils.{JsonUtils => JU} /** * Transforms a collector payload which conforms to @@ -93,22 +80,22 @@ object SendgridAdapter extends Adapter { for ((itm, index) <- parsed.children.zipWithIndex) yield { - val eventType = (itm \\ "event").extractOpt[String] + val eventType = (itm \\ "event").extractOpt[String] val queryString = toMap(payload.querystring) lookupSchema(eventType, VendorName, index, EventSchemaMap) map { schema => { RawEvent( api = payload.api, - parameters = - toUnstructEventParams(TrackerVersion, - queryString, - schema, - cleanupJsonEventValues(itm, ("event", eventType.get).some, "timestamp"), - "srv"), + parameters = toUnstructEventParams( + TrackerVersion, + queryString, + schema, + cleanupJsonEventValues(itm, ("event", eventType.get).some, "timestamp"), + "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context ) } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/StatusGatorAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/StatusGatorAdapter.scala index 5af31ee98..f508e65de 100644 --- 
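The Pingdom querystring reduction above only keeps the parsed key/value pairs when no failures were collected, otherwise it returns just the failures. The same shape with Either instead of scalaz:

def reduceParams(parsed: List[Either[String, (String, String)]]): Either[List[String], Map[String, String]] = {
  val failures  = parsed.collect { case Left(f)   => f }
  val successes = parsed.collect { case Right(kv) => kv }
  (successes, failures) match {
    case (s, Nil) if s.nonEmpty => Right(s.toMap)                       // no failures collected
    case (_, f :: fs)           => Left(f :: fs)                        // some failures: return only those
    case _                      => Left(List("empty parameters list was passed"))
  }
}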
a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/StatusGatorAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/StatusGatorAdapter.scala @@ -10,38 +10,25 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Java import java.net.URI -import org.apache.http.client.utils.URLEncodedUtils +import java.nio.charset.StandardCharsets.UTF_8 -// Scala import scala.collection.JavaConversions._ -import scala.util.control.NonFatal import scala.util.{Try, Success => TS, Failure => TF} -// Scalaz +import com.fasterxml.jackson.core.JsonParseException +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} +import org.apache.http.client.utils.URLEncodedUtils import scalaz._ import Scalaz._ - -// Jackson -import com.fasterxml.jackson.core.JsonParseException - -// json4s import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -// Iglu -import iglu.client.{Resolver, SchemaKey} - -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} @@ -89,21 +76,21 @@ object StatusGatorAdapter extends Adapter { case (Some(body), _) => { val qsParams = toMap(payload.querystring) Try { - toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" + body), "UTF-8").toList) + toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" + body), UTF_8).toList) } match { case TF(e) => s"${VendorName} incorrect event string : [${JU.stripInstanceEtc(e.getMessage).orNull}]".failureNel case TS(bodyMap) => try { val a: Map[String, String] = bodyMap - val event = parse(compact(render(a))) + val event = parse(compact(render(a))) NonEmptyList( RawEvent( - api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, qsParams, EventSchema, camelize(event), "srv"), + api = payload.api, + parameters = toUnstructEventParams(TrackerVersion, qsParams, EventSchema, camelize(event), "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context )).success } catch { case e: JsonParseException => diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/UnbounceAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/UnbounceAdapter.scala index 942600797..1519488b3 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/UnbounceAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/UnbounceAdapter.scala @@ -10,37 +10,25 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
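StatusGator above renders the form body to JSON and runs it through the shared camelize helper before wrapping it as an unstructured event. A rough approximation of that key transformation (the real helper may differ in edge cases):

import org.json4s._

// snake_case field names -> camelCase, applied recursively by transformField
def camelizeKeys(json: JValue): JValue = json transformField {
  case (key, value) =>
    val parts = key.split("_").toList
    (parts.headOption.getOrElse("") + parts.drop(1).map(_.capitalize).mkString, value)
}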
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Java import java.net.URI -import org.apache.http.client.utils.URLEncodedUtils +import java.nio.charset.StandardCharsets.UTF_8 -// Scala import scala.util.{Try, Success => TS, Failure => TF} import scala.collection.JavaConversions._ -// Scalaz +import com.fasterxml.jackson.core.JsonParseException +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} +import org.apache.http.client.utils.URLEncodedUtils import scalaz._ import Scalaz._ - -// Jackson -import com.fasterxml.jackson.core.JsonParseException - -// json4s import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -// Iglu -import iglu.client.{Resolver, SchemaKey} - -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} @@ -92,7 +80,7 @@ object UnbounceAdapter extends Adapter { if (body.isEmpty) s"${VendorName} event body is empty: nothing to process".failureNel else { val qsParams = toMap(payload.querystring) - Try { toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" + body), "UTF-8").toList) } match { + Try { toMap(URLEncodedUtils.parse(URI.create("http://localhost/?" + body), UTF_8).toList) } match { case TF(e) => s"${VendorName} incorrect event string : [${JU.stripInstanceEtc(e.getMessage).orNull}]".failureNel case TS(bodyMap) => @@ -104,11 +92,11 @@ object UnbounceAdapter extends Adapter { case unstructEventParams => NonEmptyList( RawEvent( - api = payload.api, - parameters = unstructEventParams, + api = payload.api, + parameters = unstructEventParams, contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context )).success } } @@ -119,11 +107,12 @@ object UnbounceAdapter extends Adapter { } private def payloadBodyToEvent(bodyMap: Map[String, String]): Validated[JValue] = - (bodyMap.get("page_id"), - bodyMap.get("page_name"), - bodyMap.get("variant"), - bodyMap.get("page_url"), - bodyMap.get("data.json")) match { + ( + bodyMap.get("page_id"), + bodyMap.get("page_name"), + bodyMap.get("variant"), + bodyMap.get("page_url"), + bodyMap.get("data.json")) match { case (None, _, _, _, _) => s"${VendorName} context data missing 'page_id'".failureNel case (_, None, _, _, _) => s"${VendorName} context data missing 'page_name'".failureNel case (_, _, None, _, _) => s"${VendorName} context data missing 'variant'".failureNel diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/UrbanAirshipAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/UrbanAirshipAdapter.scala index 9273b8a74..c9827b047 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/UrbanAirshipAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/UrbanAirshipAdapter.scala @@ -10,33 +10,20 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
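Unbounce above insists on page_id, page_name, variant, page_url and data.json being present before it parses anything; the tuple match reports the first missing key. Spelled out with Either, error texts illustrative:

def requireUnbounceFields(bodyMap: Map[String, String]): Either[String, String] =
  (bodyMap.get("page_id"), bodyMap.get("page_name"), bodyMap.get("variant"),
   bodyMap.get("page_url"), bodyMap.get("data.json")) match {
    case (None, _, _, _, _)       => Left("context data missing 'page_id'")
    case (_, None, _, _, _)       => Left("context data missing 'page_name'")
    case (_, _, None, _, _)       => Left("context data missing 'variant'")
    case (_, _, _, None, _)       => Left("context data missing 'page_url'")
    case (_, _, _, _, None)       => Left("context data missing 'data.json'")
    case (_, _, _, _, Some(json)) => Right(json) // the JSON payload that gets parsed next
  }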
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Java import com.fasterxml.jackson.core.JsonParseException -// Scalaz +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} +import org.joda.time.{DateTime, DateTimeZone} import scalaz.Scalaz._ - -// json4s import org.json4s._ import org.json4s.jackson.JsonMethods._ -// Iglu -import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} - -// Joda Time -import org.joda.time.{DateTime, DateTimeZone} -import org.joda.time.format.{DateTimeFormat, DateTimeFormatter} - -// This project -import com.snowplowanalytics.snowplow.enrich.common.loaders.CollectorPayload -import com.snowplowanalytics.snowplow.enrich.common.utils.{JsonUtils => JU} +import loaders.CollectorPayload +import utils.{JsonUtils => JU} /** * Transforms a collector payload which conforms to @@ -53,28 +40,30 @@ object UrbanAirshipAdapter extends Adapter { // Schemas for reverse-engineering a Snowplow unstructured event private val EventSchemaMap = Map( - "CLOSE" -> SchemaKey("com.urbanairship.connect", "CLOSE", "jsonschema", "1-0-0").toSchemaUri, - "CUSTOM" -> SchemaKey("com.urbanairship.connect", "CUSTOM", "jsonschema", "1-0-0").toSchemaUri, - "FIRST_OPEN" -> SchemaKey("com.urbanairship.connect", "FIRST_OPEN", "jsonschema", "1-0-0").toSchemaUri, + "CLOSE" -> SchemaKey("com.urbanairship.connect", "CLOSE", "jsonschema", "1-0-0").toSchemaUri, + "CUSTOM" -> SchemaKey("com.urbanairship.connect", "CUSTOM", "jsonschema", "1-0-0").toSchemaUri, + "FIRST_OPEN" -> SchemaKey("com.urbanairship.connect", "FIRST_OPEN", "jsonschema", "1-0-0").toSchemaUri, "IN_APP_MESSAGE_DISPLAY" -> SchemaKey("com.urbanairship.connect", "IN_APP_MESSAGE_DISPLAY", "jsonschema", "1-0-0").toSchemaUri, - "IN_APP_MESSAGE_EXPIRATION" -> SchemaKey("com.urbanairship.connect", - "IN_APP_MESSAGE_EXPIRATION", - "jsonschema", - "1-0-0").toSchemaUri, - "IN_APP_MESSAGE_RESOLUTION" -> SchemaKey("com.urbanairship.connect", - "IN_APP_MESSAGE_RESOLUTION", - "jsonschema", - "1-0-0").toSchemaUri, - "LOCATION" -> SchemaKey("com.urbanairship.connect", "LOCATION", "jsonschema", "1-0-0").toSchemaUri, - "OPEN" -> SchemaKey("com.urbanairship.connect", "OPEN", "jsonschema", "1-0-0").toSchemaUri, - "PUSH_BODY" -> SchemaKey("com.urbanairship.connect", "PUSH_BODY", "jsonschema", "1-0-0").toSchemaUri, - "REGION" -> SchemaKey("com.urbanairship.connect", "REGION", "jsonschema", "1-0-0").toSchemaUri, - "RICH_DELETE" -> SchemaKey("com.urbanairship.connect", "RICH_DELETE", "jsonschema", "1-0-0").toSchemaUri, + "IN_APP_MESSAGE_EXPIRATION" -> SchemaKey( + "com.urbanairship.connect", + "IN_APP_MESSAGE_EXPIRATION", + "jsonschema", + "1-0-0").toSchemaUri, + "IN_APP_MESSAGE_RESOLUTION" -> SchemaKey( + "com.urbanairship.connect", + "IN_APP_MESSAGE_RESOLUTION", + "jsonschema", + "1-0-0").toSchemaUri, + "LOCATION" -> SchemaKey("com.urbanairship.connect", "LOCATION", "jsonschema", "1-0-0").toSchemaUri, + "OPEN" -> SchemaKey("com.urbanairship.connect", "OPEN", "jsonschema", "1-0-0").toSchemaUri, + "PUSH_BODY" -> SchemaKey("com.urbanairship.connect", "PUSH_BODY", "jsonschema", "1-0-0").toSchemaUri, + "REGION" -> SchemaKey("com.urbanairship.connect", "REGION", "jsonschema", "1-0-0").toSchemaUri, + "RICH_DELETE" -> SchemaKey("com.urbanairship.connect", "RICH_DELETE", "jsonschema", "1-0-0").toSchemaUri, "RICH_DELIVERY" -> SchemaKey("com.urbanairship.connect", "RICH_DELIVERY", "jsonschema", "1-0-0").toSchemaUri, - "RICH_HEAD" -> 
SchemaKey("com.urbanairship.connect", "RICH_HEAD", "jsonschema", "1-0-0").toSchemaUri, - "SEND" -> SchemaKey("com.urbanairship.connect", "SEND", "jsonschema", "1-0-0").toSchemaUri, - "TAG_CHANGE" -> SchemaKey("com.urbanairship.connect", "TAG_CHANGE", "jsonschema", "1-0-0").toSchemaUri, - "UNINSTALL" -> SchemaKey("com.urbanairship.connect", "UNINSTALL", "jsonschema", "1-0-0").toSchemaUri + "RICH_HEAD" -> SchemaKey("com.urbanairship.connect", "RICH_HEAD", "jsonschema", "1-0-0").toSchemaUri, + "SEND" -> SchemaKey("com.urbanairship.connect", "SEND", "jsonschema", "1-0-0").toSchemaUri, + "TAG_CHANGE" -> SchemaKey("com.urbanairship.connect", "TAG_CHANGE", "jsonschema", "1-0-0").toSchemaUri, + "UNINSTALL" -> SchemaKey("com.urbanairship.connect", "UNINSTALL", "jsonschema", "1-0-0").toSchemaUri ) /** @@ -93,25 +82,25 @@ object UrbanAirshipAdapter extends Adapter { try { - val parsed = parse(body_json) + val parsed = parse(body_json) val eventType = (parsed \ "type").extractOpt[String] - val trueTimestamp = (parsed \ "occurred").extractOpt[String] - val eid = (parsed \ "id").extractOpt[String] + val trueTimestamp = (parsed \ "occurred").extractOpt[String] + val eid = (parsed \ "id").extractOpt[String] val collectorTimestamp = (parsed \ "processed").extractOpt[String] lookupSchema(eventType, VendorName, EventSchemaMap) map { schema => RawEvent( api = payload.api, - parameters = toUnstructEventParams(TrackerVersion, - toMap(payload.querystring) ++ Map("ttm" -> toTtmFormat(trueTimestamp.get), - "eid" -> eid.get), - schema, - parsed, - "srv"), + parameters = toUnstructEventParams( + TrackerVersion, + toMap(payload.querystring) ++ Map("ttm" -> toTtmFormat(trueTimestamp.get), "eid" -> eid.get), + schema, + parsed, + "srv"), contentType = payload.contentType, - source = payload.source, - context = payload.context.copy(timestamp = Some(new DateTime(collectorTimestamp.get, DateTimeZone.UTC))) + source = payload.source, + context = payload.context.copy(timestamp = Some(new DateTime(collectorTimestamp.get, DateTimeZone.UTC))) ) } @@ -139,7 +128,7 @@ object UrbanAirshipAdapter extends Adapter { */ def toRawEvents(payload: CollectorPayload)(implicit resolver: Resolver): ValidatedRawEvents = (payload.body, payload.contentType) match { - case (None, _) => s"Request body is empty: no ${VendorName} event to process".failNel + case (None, _) => s"Request body is empty: no ${VendorName} event to process".failNel case (_, Some(ct)) => s"Content type of ${ct} provided, expected None for ${VendorName}".failNel case (Some(body), _) => { val event = payloadBodyToEvent(body, payload) diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/VeroAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/VeroAdapter.scala index 9511f1d7b..5c0ebe538 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/VeroAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/VeroAdapter.scala @@ -10,30 +10,19 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Scalaz -import scalaz.Scalaz._ - -// json4s -import org.json4s._ -import org.json4s.jackson.JsonMethods._ +import scala.util.{Failure, Success, Try} -// Iglu import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} - -// Joda Time import org.joda.time.DateTime +import scalaz.Scalaz._ +import org.json4s._ +import org.json4s.jackson.JsonMethods._ -// This project -import com.snowplowanalytics.snowplow.enrich.common.loaders.CollectorPayload - -import scala.util.{Failure, Success, Try} +import loaders.CollectorPayload /** * Transforms a collector payload which conforms to @@ -53,11 +42,11 @@ object VeroAdapter extends Adapter { // Schemas for reverse-engineering a Snowplow unstructured event private val EventSchemaMap = Map( - "bounced" -> SchemaKey("com.getvero", "bounced", "jsonschema", "1-0-0").toSchemaUri, - "clicked" -> SchemaKey("com.getvero", "clicked", "jsonschema", "1-0-0").toSchemaUri, - "delivered" -> SchemaKey("com.getvero", "delivered", "jsonschema", "1-0-0").toSchemaUri, - "opened" -> SchemaKey("com.getvero", "opened", "jsonschema", "1-0-0").toSchemaUri, - "sent" -> SchemaKey("com.getvero", "sent", "jsonschema", "1-0-0").toSchemaUri, + "bounced" -> SchemaKey("com.getvero", "bounced", "jsonschema", "1-0-0").toSchemaUri, + "clicked" -> SchemaKey("com.getvero", "clicked", "jsonschema", "1-0-0").toSchemaUri, + "delivered" -> SchemaKey("com.getvero", "delivered", "jsonschema", "1-0-0").toSchemaUri, + "opened" -> SchemaKey("com.getvero", "opened", "jsonschema", "1-0-0").toSchemaUri, + "sent" -> SchemaKey("com.getvero", "sent", "jsonschema", "1-0-0").toSchemaUri, "unsubscribed" -> SchemaKey("com.getvero", "unsubscribed", "jsonschema", "1-0-0").toSchemaUri, "user_created" -> SchemaKey("com.getvero", "created", "jsonschema", "1-0-0").toSchemaUri, "user_updated" -> SchemaKey("com.getvero", "updated", "jsonschema", "1-0-0").toSchemaUri @@ -80,17 +69,18 @@ object VeroAdapter extends Adapter { } eventType <- Try((parsed \ "type").extract[String]) match { case Success(et) => et.successNel - case Failure(e) => s"Could not extract type from $VendorName event JSON: [${e.getMessage}]".failureNel + case Failure(e) => s"Could not extract type from $VendorName event JSON: [${e.getMessage}]".failureNel } - formattedEvent = cleanupJsonEventValues(parsed, ("type", eventType).some, s"${eventType}_at") + formattedEvent = cleanupJsonEventValues(parsed, ("type", eventType).some, s"${eventType}_at") reformattedEvent = reformatParameters(formattedEvent) schema <- lookupSchema(eventType.some, VendorName, EventSchemaMap) params = toUnstructEventParams(TrackerVersion, toMap(payload.querystring), schema, reformattedEvent, "srv") - rawEvent = RawEvent(api = payload.api, - parameters = params, - contentType = payload.contentType, - source = payload.source, - context = payload.context) + rawEvent = RawEvent( + api = payload.api, + parameters = params, + contentType = payload.contentType, + source = payload.source, + context = payload.context) } yield rawEvent /** @@ -132,7 +122,7 @@ object VeroAdapter extends Adapter { } json transformField { - case ("_tags", JObject(v)) => ("tags", JObject(v)) + case ("_tags", JObject(v)) => ("tags", JObject(v)) case ("triggered_at", JInt(value)) => ("triggered_at", toStringField(value.toLong * 1000)) } } diff --git 
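Vero's reformatParameters above renames _tags to tags and turns the integer triggered_at (seconds) into a formatted string field. The same transformField shape, with toStringField reduced to a placeholder since its exact output format is not shown in this diff:

import org.json4s._

def toStringField(millis: Long): JString = JString(millis.toString) // placeholder; the adapter formats a datetime here

def reformatVero(json: JValue): JValue = json transformField {
  case ("_tags", JObject(v))        => ("tags", JObject(v))
  case ("triggered_at", JInt(secs)) => ("triggered_at", toStringField(secs.toLong * 1000))
}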
a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/RedirectAdapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/RedirectAdapter.scala index 3dd171edb..933cb483a 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/RedirectAdapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/RedirectAdapter.scala @@ -10,33 +10,21 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry package snowplow -// Jackson import com.fasterxml.jackson.databind.JsonNode - -// Iglu -import iglu.client.{Resolver, SchemaCriterion, SchemaKey} -import iglu.client.validation.ValidatableJsonMethods._ - -// Scalaz +import com.snowplowanalytics.iglu.client.{Resolver, SchemaKey} import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -// This project import loaders.CollectorPayload -import utils.{JsonUtils => JU} +import utils.{JsonUtils => JU} import utils.{ConversionUtils => CU} /** @@ -91,10 +79,10 @@ object RedirectAdapter extends Adapter { // Already have an event so add the URI redirect as a context (more fiddly) def newCo = Map("co" -> compact(toContext(json))).successNel (originalParams.get("cx"), originalParams.get("co")) match { - case (None, None) => newCo + case (None, None) => newCo case (None, Some(co)) if co == "" => newCo - case (None, Some(co)) => addToExistingCo(json, co).map(str => Map("co" -> str)) - case (Some(cx), _) => addToExistingCx(json, cx).map(str => Map("cx" -> str)) + case (None, Some(co)) => addToExistingCo(json, co).map(str => Map("co" -> str)) + case (Some(cx), _) => addToExistingCx(json, cx).map(str => Map("cx" -> str)) } } else { // Add URI redirect as an unstructured event @@ -103,18 +91,18 @@ object RedirectAdapter extends Adapter { val fixedParams = Map( "tv" -> TrackerVersion, - "p" -> originalParams.getOrElse("p", TrackerPlatform) // Required field + "p" -> originalParams.getOrElse("p", TrackerPlatform) // Required field ) for { np <- newParams ev = NonEmptyList( RawEvent( - api = payload.api, - parameters = (originalParams - "u") ++ np ++ fixedParams, + api = payload.api, + parameters = (originalParams - "u") ++ np ++ fixedParams, contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context )) } yield ev } @@ -175,7 +163,7 @@ object RedirectAdapter extends Adapter { private def addToExistingCx(newContext: JValue, existing: String): Validated[String] = for { decoded <- CU.decodeBase64Url("cx", existing).toValidationNel: Validated[String] - added <- addToExistingCo(newContext, decoded) + added <- addToExistingCo(newContext, decoded) recoded = CU.encodeBase64Url(added) } yield recoded diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/Tp1Adapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/Tp1Adapter.scala index 6696c5df5..719997ae6 100644 --- 
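The redirect adapter above decides whether the uri_redirect context goes into a fresh co field, is merged into an existing co, or is merged into the base64url-encoded cx. A happy-path stand-in for the decodeBase64Url/encodeBase64Url helpers it leans on (the real ConversionUtils versions validate their input and return a Validation):

import java.nio.charset.StandardCharsets.UTF_8
import java.util.Base64

def decodeBase64Url(s: String): String = new String(Base64.getUrlDecoder.decode(s), UTF_8)
def encodeBase64Url(s: String): String = Base64.getUrlEncoder.encodeToString(s.getBytes(UTF_8))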
a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/Tp1Adapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/Tp1Adapter.scala @@ -10,22 +10,15 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry package snowplow -// Iglu -import iglu.client.Resolver - -// Scalaz +import com.snowplowanalytics.iglu.client.Resolver import scalaz._ import Scalaz._ -// This project import loaders.CollectorPayload /** @@ -54,11 +47,11 @@ object Tp1Adapter extends Adapter { } else { NonEmptyList( RawEvent( - api = payload.api, - parameters = params, + api = payload.api, + parameters = params, contentType = payload.contentType, - source = payload.source, - context = payload.context + source = payload.source, + context = payload.context )).success } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/Tp2Adapter.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/Tp2Adapter.scala index 29798021f..ffca54aef 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/Tp2Adapter.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/Tp2Adapter.scala @@ -10,32 +10,21 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry package snowplow -// Java import java.util.Map.{Entry => JMapEntry} -// Jackson -import com.fasterxml.jackson.databind.JsonNode - -// Scala import scala.collection.JavaConversions._ -// Iglu -import iglu.client.{Resolver, SchemaCriterion} -import iglu.client.validation.ValidatableJsonMethods._ - -// Scalaz +import com.snowplowanalytics.iglu.client.{Resolver, SchemaCriterion} +import com.snowplowanalytics.iglu.client.validation.ValidatableJsonMethods._ +import com.fasterxml.jackson.databind.JsonNode import scalaz._ import Scalaz._ -// This project import loaders.CollectorPayload import utils.{JsonUtils => JU} @@ -48,7 +37,7 @@ object Tp2Adapter extends Adapter { // Expected content types for a request body private object ContentTypes { val list = List("application/json", "application/json; charset=utf-8", "application/json; charset=UTF-8") - val str = list.mkString(", ") + val str = list.mkString(", ") } // Request body expected to validate against this JSON Schema @@ -78,11 +67,11 @@ object Tp2Adapter extends Adapter { case (Some(_), None) => s"Request body provided but content type empty, expected one of: ${ContentTypes.str}".failNel case (None, Some(ct)) => s"Content type of ${ct} provided but request body empty".failNel - case (None, None) => NonEmptyList(qsParams).success + case (None, None) => NonEmptyList(qsParams).success case (Some(bdy), Some(_)) => // Build our NEL of parameters for { json <- extractAndValidateJson("Body", PayloadDataSchema, bdy) - nel <- toParametersNel(json, qsParams) + nel <- toParametersNel(json, qsParams) } yield nel } @@ -112,8 +101,9 @@ object Tp2Adapter extends Adapter { * @return a NEL of Map[String, String] parameters * on Succeess, a NEL of Strings on Failure */ - private def toParametersNel(instance: JsonNode, - mergeWith: RawEventParameters): Validated[NonEmptyList[RawEventParameters]] = { + private def toParametersNel( + instance: JsonNode, + mergeWith: RawEventParameters): Validated[NonEmptyList[RawEventParameters]] = { val events: List[List[Validation[String, (String, String)]]] = for { event <- instance.iterator.toList @@ -156,7 +146,7 @@ object Tp2Adapter extends Adapter { * */ private def toParameter(entry: JMapEntry[String, JsonNode]): Validation[String, Tuple2[String, String]] = { - val key = entry.getKey + val key = entry.getKey val rawValue = entry.getValue Option(rawValue.textValue) match { diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/ClientEnrichments.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/ClientEnrichments.scala index e4bf530f5..44d2c8e76 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/ClientEnrichments.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/ClientEnrichments.scala @@ -13,13 +13,10 @@ package com.snowplowanalytics.snowplow.enrich.common package enrichments -// Java import java.lang.{Integer => JInteger} -// Scala import scala.util.control.NonFatal -// Scalaz import scalaz._ import Scalaz._ diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/EnrichmentManager.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/EnrichmentManager.scala index 80fa5e3de..5f9dad6fe 
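Tp2Adapter above turns each object in the tp2 body array into raw-event parameters, rejecting non-string values, and merges them with the querystring parameters. A condensed sketch of that per-event step; which side wins on a key clash is a detail of the real toParametersNel, here the body fields do:

import scala.collection.JavaConverters._
import com.fasterxml.jackson.databind.JsonNode

def eventToParams(event: JsonNode, qsParams: Map[String, String]): Either[String, Map[String, String]] = {
  val fields = event.fields.asScala.toList.map { entry =>
    Option(entry.getValue.textValue) match {
      case Some(text) => Right(entry.getKey -> text)
      case None       => Left(s"value for field ${entry.getKey} is not a string")
    }
  }
  val errors = fields.collect { case Left(err) => err }
  if (errors.nonEmpty) Left(errors.head)
  else Right(qsParams ++ fields.collect { case Right(kv) => kv }.toMap)
}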
100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/EnrichmentManager.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/EnrichmentManager.scala @@ -10,34 +10,25 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package enrichments -// Joda -import org.joda.time.DateTime - -// Iglu -import iglu.client.Resolver +import java.nio.charset.Charset -// Scalaz +import com.snowplowanalytics.iglu.client.Resolver +import org.joda.time.DateTime import scalaz._ import Scalaz._ -// This project import adapters.RawEvent -import outputs.EnrichedEvent - +import enrichments.{EventEnrichments => EE} +import enrichments.{MiscEnrichments => ME} +import enrichments.{ClientEnrichments => CE} +import enrichments.web.{PageEnrichments => WPE} import utils.{ConversionUtils => CU, JsonUtils => JU} import utils.MapTransformer._ import utils.shredder.Shredder - -import enrichments.{EventEnrichments => EE} -import enrichments.{MiscEnrichments => ME} -import enrichments.{ClientEnrichments => CE} -import web.{PageEnrichments => WPE} +import outputs.EnrichedEvent /** * A module to hold our enrichment process. @@ -68,7 +59,7 @@ object EnrichmentManager { implicit resolver: Resolver): ValidatedEnrichedEvent = { // Placeholders for where the Success value doesn't matter. // Useful when you're updating large (>22 field) POSOs. - val unitSuccess = ().success[String] + val unitSuccess = ().success[String] val unitSuccessNel = ().successNel[String] // 1. 
Enrichments not expected to fail @@ -77,10 +68,10 @@ object EnrichmentManager { // with the fields which cannot error val event = { val e = new EnrichedEvent() - e.event_id = EE.generateEventId // May be updated later if we have an `eid` parameter - e.v_collector = raw.source.name // May be updated later if we have a `cv` parameter - e.v_etl = ME.etlVersion(hostEtlVersion) - e.etl_tstamp = EE.toTimestamp(etlTstamp) + e.event_id = EE.generateEventId // May be updated later if we have an `eid` parameter + e.v_collector = raw.source.name // May be updated later if we have a `cv` parameter + e.v_etl = ME.etlVersion(hostEtlVersion) + e.etl_tstamp = EE.toTimestamp(etlTstamp) e.network_userid = raw.context.userId.orNull // May be updated later by 'nuid' e.user_ipaddress = ME.extractIp("user_ipaddress", raw.context.ipAddress.orNull).toOption.orNull // May be updated later by 'ip' e @@ -102,7 +93,7 @@ object EnrichmentManager { // May be updated later if we have a `ua` parameter val useragent = raw.context.useragent match { case Some(ua) => - val u = CU.decodeString(raw.source.encoding, "useragent", ua) + val u = CU.decodeString(Charset.forName(raw.source.encoding), "useragent", ua) u.flatMap { ua => event.useragent = ua ua.success @@ -220,18 +211,18 @@ object EnrichmentManager { val pageUri = WPE.extractPageUri(raw.context.refererUri, Option(event.page_url)) for { uri <- pageUri - u <- uri + u <- uri } { // Update the page_url event.page_url = u.toString // Set the URL components val components = CU.explodeUri(u) - event.page_urlscheme = components.scheme - event.page_urlhost = components.host - event.page_urlport = components.port - event.page_urlpath = components.path.orNull - event.page_urlquery = components.query.orNull + event.page_urlscheme = components.scheme + event.page_urlhost = components.host + event.page_urlport = components.port + event.page_urlpath = components.path.orNull + event.page_urlquery = components.query.orNull event.page_urlfragment = components.fragment.orNull } @@ -240,18 +231,18 @@ object EnrichmentManager { // c.f. 
https://github.com/snowplow/snowplow/issues/351 val geoLocation = (for { enrichment <- registry.getIpLookupsEnrichment - ip <- Option(event.user_ipaddress) + ip <- Option(event.user_ipaddress) result = { val ipLookupResult = enrichment.extractIpInformation(ip) ipLookupResult.ipLocation.foreach(_.foreach { loc => - event.geo_country = loc.countryCode - event.geo_region = loc.region.orNull - event.geo_city = loc.city.orNull - event.geo_zipcode = loc.postalCode.orNull - event.geo_latitude = loc.latitude - event.geo_longitude = loc.longitude + event.geo_country = loc.countryCode + event.geo_region = loc.region.orNull + event.geo_city = loc.city.orNull + event.geo_zipcode = loc.postalCode.orNull + event.geo_latitude = loc.latitude + event.geo_longitude = loc.longitude event.geo_region_name = loc.regionName.orNull - event.geo_timezone = loc.timezone.orNull + event.geo_timezone = loc.timezone.orNull }) ipLookupResult.isp.foreach(_.foreach { i => event.ip_isp = i @@ -290,9 +281,11 @@ object EnrichmentManager { event.user_ipaddress match { case IPv4Regex(ipv4) if !List(null, "", s"\0").contains(event.useragent) => iab - .getIabContext(Option(event.useragent), - Option(ipv4), - Option(event.derived_tstamp).map(EventEnrichments.fromTimestamp)) + .getIabContext( + Option(event.useragent), + Option(ipv4), + Option(event.derived_tstamp).map(EventEnrichments.fromTimestamp) + ) .map(_.some) case _ => None.success } @@ -303,7 +296,7 @@ object EnrichmentManager { Option(event.user_ipaddress).map(ip => event.user_ipaddress = registry.getAnonIpEnrichment match { case Some(anon) => anon.anonymizeIp(ip) - case None => ip + case None => ip }) // Parse the useragent using user-agent-utils @@ -314,16 +307,16 @@ object EnrichmentManager { case Some(ua) => val ca = uap.extractClientAttributes(ua) ca.flatMap(c => { - event.br_name = c.browserName + event.br_name = c.browserName event.br_family = c.browserFamily c.browserVersion.map(bv => event.br_version = bv) - event.br_type = c.browserType + event.br_type = c.browserType event.br_renderengine = c.browserRenderEngine - event.os_name = c.osName - event.os_family = c.osFamily + event.os_name = c.osName + event.os_family = c.osFamily event.os_manufacturer = c.osManufacturer - event.dvce_type = c.deviceType - event.dvce_ismobile = CU.booleanToJByte(c.deviceIsMobile) + event.dvce_type = c.deviceType + event.dvce_ismobile = CU.booleanToJByte(c.deviceIsMobile) c.success }) ca @@ -340,7 +333,7 @@ object EnrichmentManager { case Some(uap) => { Option(event.useragent) match { case Some(ua) => uap.extractUserAgent(ua).map(_.some) - case None => None.success // No fields updated + case None => None.success // No fields updated } } case None => None.success @@ -354,25 +347,26 @@ object EnrichmentManager { event.base_currency = currency.baseCurrency // Note that stringToMaybeDouble is applied to either-valid-or-null event POJO // properties, so we don't expect any of these four vals to be a Failure - val trTax = CU.stringToMaybeDouble("tr_tx", event.tr_tax).toValidationNel - val tiPrice = CU.stringToMaybeDouble("ti_pr", event.ti_price).toValidationNel - val trTotal = CU.stringToMaybeDouble("tr_tt", event.tr_total).toValidationNel + val trTax = CU.stringToMaybeDouble("tr_tx", event.tr_tax).toValidationNel + val tiPrice = CU.stringToMaybeDouble("ti_pr", event.ti_price).toValidationNel + val trTotal = CU.stringToMaybeDouble("tr_tt", event.tr_total).toValidationNel val trShipping = CU.stringToMaybeDouble("tr_sh", event.tr_shipping).toValidationNel val convertedCu = ((trTotal |@| 
trTax |@| trShipping |@| tiPrice) { - currency.convertCurrencies(Option(event.tr_currency), - _, - _, - _, - Option(event.ti_currency), - _, - raw.context.timestamp) + currency.convertCurrencies( + Option(event.tr_currency), + _, + _, + _, + Option(event.ti_currency), + _, + raw.context.timestamp) }).flatMap(x => x) for ((total, tax, shipping, price) <- convertedCu.toOption) { - event.tr_total_base = total.orNull - event.tr_tax_base = tax.orNull + event.tr_total_base = total.orNull + event.tr_tax_base = tax.orNull event.tr_shipping_base = shipping.orNull - event.ti_price_base = price.orNull + event.ti_price_base = price.orNull } convertedCu @@ -385,16 +379,16 @@ object EnrichmentManager { val refererUri = CU.stringToUri(event.page_referrer) for { uri <- refererUri - u <- uri + u <- uri } { // Set the URL components val components = CU.explodeUri(u) - event.refr_urlscheme = components.scheme - event.refr_urlhost = components.host - event.refr_urlport = components.port - event.refr_urlpath = components.path.orNull - event.refr_urlquery = components.query.orNull + event.refr_urlscheme = components.scheme + event.refr_urlhost = components.host + event.refr_urlport = components.port + event.refr_urlpath = components.path.orNull + event.refr_urlquery = components.query.orNull event.refr_urlfragment = components.fragment.orNull // Set the referrer details @@ -403,7 +397,7 @@ object EnrichmentManager { for (refr <- rp.extractRefererDetails(u, event.page_urlhost)) { event.refr_medium = CU.makeTsvSafe(refr.medium.toString) event.refr_source = CU.makeTsvSafe(refr.source.orNull) - event.refr_term = CU.makeTsvSafe(refr.term.orNull) + event.refr_term = CU.makeTsvSafe(refr.term.orNull) } } case None => unitSuccess @@ -412,8 +406,9 @@ object EnrichmentManager { // Parse the page URI's querystring val pageQsMap = pageUri match { - case Success(Some(u)) => CU.extractQuerystring(u, raw.source.encoding).map(_.some) - case _ => Success(None) + case Success(Some(u)) => + CU.extractQuerystring(u, Charset.forName(raw.source.encoding)).map(_.some) + case _ => Success(None) } // Marketing attribution @@ -423,13 +418,13 @@ object EnrichmentManager { case Some(ce) => ce.extractMarketingFields(qsMap) .flatMap(cmp => { - event.mkt_medium = CU.makeTsvSafe(cmp.medium.orNull) - event.mkt_source = CU.makeTsvSafe(cmp.source.orNull) - event.mkt_term = CU.makeTsvSafe(cmp.term.orNull) - event.mkt_content = CU.makeTsvSafe(cmp.content.orNull) + event.mkt_medium = CU.makeTsvSafe(cmp.medium.orNull) + event.mkt_source = CU.makeTsvSafe(cmp.source.orNull) + event.mkt_term = CU.makeTsvSafe(cmp.term.orNull) + event.mkt_content = CU.makeTsvSafe(cmp.content.orNull) event.mkt_campaign = CU.makeTsvSafe(cmp.campaign.orNull) - event.mkt_clickid = CU.makeTsvSafe(cmp.clickId.orNull) - event.mkt_network = CU.makeTsvSafe(cmp.network.orNull) + event.mkt_clickid = CU.makeTsvSafe(cmp.clickId.orNull) + event.mkt_network = CU.makeTsvSafe(cmp.network.orNull) cmp.success }) case None => unitSuccessNel @@ -443,7 +438,7 @@ object EnrichmentManager { val crossDomainParseResult = WPE.parseCrossDomain(qsMap) for ((maybeRefrDomainUserid, maybeRefrDvceTstamp) <- crossDomainParseResult.toOption) { maybeRefrDomainUserid.foreach(event.refr_domain_userid = _: String) - maybeRefrDvceTstamp.foreach(event.refr_dvce_tstamp = _: String) + maybeRefrDvceTstamp.foreach(event.refr_dvce_tstamp = _: String) } crossDomainParseResult } @@ -453,7 +448,7 @@ object EnrichmentManager { // This enrichment cannot fail (registry.getEventFingerprintEnrichment match { case Some(efe) 
=> event.event_fingerprint = efe.getEventFingerprint(sourceMap) - case _ => () + case _ => () }) // Validate custom contexts @@ -465,16 +460,16 @@ object EnrichmentManager { // Validate unstructured event val unstructEvent = Shredder.extractAndValidateUnstructEvent(event) match { case Failure(msgs) => msgs.map(_.toString).fail - case Success(ue) => ue.success + case Success(ue) => ue.success } // Extract the event vendor/name/format/version val extractSchema = SchemaEnrichment .extractSchema(event) .map(schemaKey => { - event.event_vendor = schemaKey.vendor - event.event_name = schemaKey.name - event.event_format = schemaKey.format + event.event_vendor = schemaKey.vendor + event.event_name = schemaKey.name + event.event_format = schemaKey.format event.event_version = schemaKey.version unitSuccess }) @@ -482,7 +477,7 @@ object EnrichmentManager { // Execute the JavaScript scripting enrichment val jsScript = registry.getJavascriptScriptEnrichment match { case Some(jse) => jse.process(event) - case None => Nil.success + case None => Nil.success } // Execute cookie extractor enrichment @@ -506,9 +501,10 @@ object EnrichmentManager { // Fetch weather context val weatherContext = registry.getWeatherEnrichment match { case Some(we) => { - we.getWeatherContext(Option(event.geo_latitude), - Option(event.geo_longitude), - Option(event.derived_tstamp).map(EventEnrichments.fromTimestamp)) + we.getWeatherContext( + Option(event.geo_latitude), + Option(event.geo_longitude), + Option(event.derived_tstamp).map(EventEnrichments.fromTimestamp)) .map(_.some) } case None => None.success @@ -568,7 +564,7 @@ object EnrichmentManager { val piiTransform = registry.getPiiPseudonymizerEnrichment match { case Some(enrichment) => enrichment.transformer(event).success - case None => Nil.success + case None => Nil.success } // Collect our errors on Failure, or return our event on Success diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/EnrichmentRegistry.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/EnrichmentRegistry.scala index be7a0a59c..34bdc0491 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/EnrichmentRegistry.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/EnrichmentRegistry.scala @@ -10,59 +10,23 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package enrichments -// Scalaz import java.net.URI +import com.snowplowanalytics.iglu.client.{Resolver, SchemaCriterion, SchemaKey} +import com.snowplowanalytics.iglu.client.validation.ValidatableJsonMethods._ +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ import scalaz._ import Scalaz._ - -// json4s -import org.json4s.scalaz.JsonScalaz._ import org.json4s._ -import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -// Iglu -import iglu.client.{Resolver, SchemaCriterion, SchemaKey} -import iglu.client.validation.ValidatableJsonMethods._ -import iglu.client.validation.ProcessingMessageMethods._ - -// This project -import registry.{ - AnonIpEnrichment, - CampaignAttributionEnrichment, - CookieExtractorEnrichment, - CookieExtractorEnrichmentConfig, - CurrencyConversionEnrichment, - CurrencyConversionEnrichmentConfig, - Enrichment, - EventFingerprintEnrichment, - EventFingerprintEnrichmentConfig, - HttpHeaderExtractorEnrichment, - HttpHeaderExtractorEnrichmentConfig, - IabEnrichment, - IpLookupsEnrichment, - JavascriptScriptEnrichment, - JavascriptScriptEnrichmentConfig, - RefererParserEnrichment, - UaParserEnrichment, - UaParserEnrichmentConfig, - UserAgentUtilsEnrichment, - UserAgentUtilsEnrichmentConfig, - WeatherEnrichment, - WeatherEnrichmentConfig, - YauaaEnrichment -} +import registry._ import registry.apirequest.{ApiRequestEnrichment, ApiRequestEnrichmentConfig} import registry.pii.PiiPseudonymizerEnrichment import registry.sqlquery.{SqlQueryEnrichment, SqlQueryEnrichmentConfig} - import utils.ScalazJson4sUtils /** @@ -132,9 +96,11 @@ object EnrichmentRegistry { * @return ValidatedNelMessage boxing Option boxing Tuple2 containing * the Enrichment object and the schemaKey */ - private def buildEnrichmentConfig(schemaKey: SchemaKey, - enrichmentConfig: JValue, - localMode: Boolean): ValidatedNelMessage[Option[(String, Enrichment)]] = { + private def buildEnrichmentConfig( + schemaKey: SchemaKey, + enrichmentConfig: JValue, + localMode: Boolean + ): ValidatedNelMessage[Option[Tuple2[String, Enrichment]]] = { val enabled = ScalazJson4sUtils.extract[Boolean](enrichmentConfig, "enabled").toValidationNel enabled match { case Success(false) => None.success.toValidationNel // Enrichment is disabled diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/EventEnrichments.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/EventEnrichments.scala index a8ce254b6..a78d1305a 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/EventEnrichments.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/EventEnrichments.scala @@ -13,23 +13,14 @@ package com.snowplowanalytics.snowplow.enrich.common package enrichments -// Java import java.util.UUID -// Scala import scala.util.control.NonFatal -// Scalaz -import scalaz._ -import Scalaz._ - -// Joda-Time import org.joda.time.{DateTime, DateTimeZone, Period} import org.joda.time.format.DateTimeFormat - -// This project -import utils.{ConversionUtils => CU} -import utils.{JsonUtils => JU} +import scalaz._ +import Scalaz._ /** * Holds the enrichments related to events. 
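Note on the buildEnrichmentConfig hunk above: an enrichment block is only constructed when its JSON config has `enabled` set to anything other than `false` (the `case Success(false) => None.success` branch). A minimal sketch of that gate, using plain json4s and Option instead of the project's ValidatedNelMessage plumbing; the `ifEnabled` helper and the tiny config shape are illustrative only.

```scala
import org.json4s._
import org.json4s.jackson.JsonMethods.parse

object EnabledFlagSketch {
  implicit val formats: Formats = DefaultFormats

  // Mirrors the `case Success(false) => None.success` branch above: an enrichment
  // whose config carries "enabled": false is skipped, anything else is built.
  def ifEnabled[A](configJson: String)(build: JValue => A): Option[A] = {
    val config = parse(configJson)
    (config \ "enabled").extractOpt[Boolean] match {
      case Some(false) => None              // explicitly disabled: drop the enrichment
      case _           => Some(build(config))
    }
  }

  def main(args: Array[String]): Unit = {
    println(ifEnabled("""{"enabled": false}""")(_ => "anon_ip")) // None
    println(ifEnabled("""{"enabled": true}""")(_ => "anon_ip"))  // Some(anon_ip)
  }
}
```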
@@ -106,7 +97,7 @@ object EventEnrichments { ((dvceSentTstamp, dvceCreatedTstamp, collectorTstamp) match { case (Some(dst), Some(dct), Some(ct)) => val startTstamp = fromTimestamp(dct) - val endTstamp = fromTimestamp(dst) + val endTstamp = fromTimestamp(dst) if (startTstamp.isBefore(endTstamp)) { toTimestamp(fromTimestamp(ct).minus(new Period(startTstamp, endTstamp))).some } else { @@ -136,7 +127,7 @@ object EventEnrichments { */ val extractTimestamp: (String, String) => ValidatedString = (field, tstamp) => try { - val dt = new DateTime(tstamp.toLong) + val dt = new DateTime(tstamp.toLong) val timestampString = toTimestamp(dt) if (timestampString.startsWith("-") || dt.getYear > 9999 || dt.getYear < 0) { s"Field [$field]: [$tstamp] is formatted as [$timestampString] which isn't Redshift-compatible".fail @@ -170,7 +161,7 @@ object EventEnrichments { case "ti" => "transaction_item".success case "pv" => "page_view".success case "pp" => "page_ping".success - case ec => "Field [%s]: [%s] is not a recognised event code".format(field, ec).fail + case ec => "Field [%s]: [%s] is not a recognised event code".format(field, ec).fail } /** diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/MiscEnrichments.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/MiscEnrichments.scala index 5055c0472..f376c702c 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/MiscEnrichments.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/MiscEnrichments.scala @@ -13,20 +13,14 @@ package com.snowplowanalytics.snowplow.enrich.common package enrichments -// Scalaz import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -// This project -import utils.{ConversionUtils => CU} - -// Get our project settings import generated.ProjectSettings +import utils.{ConversionUtils => CU} /** * Miscellaneous enrichments which don't fit into @@ -61,15 +55,15 @@ object MiscEnrichments { */ val extractPlatform: (String, String) => ValidatedString = (field, platform) => { platform match { - case "web" => "web".success // Web, including Mobile Web - case "iot" => "iot".success // Internet of Things (e.g. Arduino tracker) - case "app" => "app".success // General App - case "mob" => "mob".success // Mobile / Tablet - case "pc" => "pc".success // Desktop / Laptop / Netbook + case "web" => "web".success // Web, including Mobile Web + case "iot" => "iot".success // Internet of Things (e.g. 
Arduino tracker) + case "app" => "app".success // General App + case "mob" => "mob".success // Mobile / Tablet + case "pc" => "pc".success // Desktop / Laptop / Netbook case "cnsl" => "cnsl".success // Games Console - case "tv" => "tv".success // Connected TV - case "srv" => "srv".success // Server-side App - case p => "Field [%s]: [%s] is not a supported tracking platform".format(field, p).fail + case "tv" => "tv".success // Connected TV + case "srv" => "srv".success // Server-side App + case p => "Field [%s]: [%s] is not a supported tracking platform".format(field, p).fail } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/SchemaEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/SchemaEnrichment.scala index b9d8602c5..e61b3998a 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/SchemaEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/SchemaEnrichment.scala @@ -10,33 +10,24 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package enrichments -// Iglu -import iglu.client.SchemaKey -import iglu.client.Resolver - -// Jackson import com.fasterxml.jackson.databind.JsonNode import com.fasterxml.jackson.databind.node.TextNode +import com.snowplowanalytics.iglu.client.SchemaKey +import com.snowplowanalytics.iglu.client.Resolver +import scalaz._ +import Scalaz._ -// Common import outputs.EnrichedEvent import utils.shredder.Shredder -// Scalaz -import scalaz._ -import Scalaz._ - object SchemaEnrichment { private object Schemas { - val pageViewSchema = SchemaKey("com.snowplowanalytics.snowplow", "page_view", "jsonschema", "1-0-0").success - val pagePingSchema = SchemaKey("com.snowplowanalytics.snowplow", "page_ping", "jsonschema", "1-0-0").success + val pageViewSchema = SchemaKey("com.snowplowanalytics.snowplow", "page_view", "jsonschema", "1-0-0").success + val pagePingSchema = SchemaKey("com.snowplowanalytics.snowplow", "page_ping", "jsonschema", "1-0-0").success val transactionSchema = SchemaKey("com.snowplowanalytics.snowplow", "transaction", "jsonschema", "1-0-0").success val transactionItemSchema = SchemaKey("com.snowplowanalytics.snowplow", "transaction_item", "jsonschema", "1-0-0").success @@ -45,13 +36,13 @@ object SchemaEnrichment { def extractSchema(event: EnrichedEvent)(implicit resolver: Resolver): Validation[String, SchemaKey] = event.event match { - case "page_view" => Schemas.pageViewSchema - case "page_ping" => Schemas.pagePingSchema - case "struct" => Schemas.structSchema - case "transaction" => Schemas.transactionSchema + case "page_view" => Schemas.pageViewSchema + case "page_ping" => Schemas.pagePingSchema + case "struct" => Schemas.structSchema + case "transaction" => Schemas.transactionSchema case "transaction_item" => Schemas.transactionItemSchema - case "unstruct" => extractUnstructSchema(event) - case eventType => "Unrecognized event [%s]".format(eventType).fail + case "unstruct" => extractUnstructSchema(event) + case eventType => "Unrecognized event [%s]".format(eventType).fail } private def extractUnstructSchema(event: EnrichedEvent)(implicit resolver: Resolver): 
Validation[String, SchemaKey] = diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/AnonIpEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/AnonIpEnrichment.scala index c9d76e08a..59e802196 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/AnonIpEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/AnonIpEnrichment.scala @@ -10,28 +10,15 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion - -// Scalaz +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ +import org.json4s.{DefaultFormats, JValue} import scalaz._ import Scalaz._ -// json4s -import org.json4s.{DefaultFormats, JValue} - -// Iglu -import iglu.client.{SchemaCriterion, SchemaKey} -import iglu.client.validation.ProcessingMessageMethods._ - -// This project import utils.ScalazJson4sUtils import java.net.{Inet4Address, Inet6Address} @@ -81,10 +68,10 @@ object AnonIPv4Octets extends Enumeration { type AnonIPv4Octets = Value - val One = Value(1, "1") - val Two = Value(2, "2") + val One = Value(1, "1") + val Two = Value(2, "2") val Three = Value(3, "3") - val All = Value(4, "4") + val All = Value(4, "4") /** * Convert a Stringly-typed integer diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/CampaignAttributionEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/CampaignAttributionEnrichment.scala index fc672cb35..00ce351da 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/CampaignAttributionEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/CampaignAttributionEnrichment.scala @@ -10,31 +10,15 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry -// Java -import java.net.URI - -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion - -// Scalaz +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} +import org.json4s.{DefaultFormats, JValue} import scalaz._ import Scalaz._ -// json4s -import org.json4s.{DefaultFormats, JValue} - -import iglu.client.{SchemaCriterion, SchemaKey} - -// This project -import utils.{ConversionUtils => CU} -import utils.MapTransformer.{SourceMap, TransformFunc} +import utils.MapTransformer.SourceMap import utils.ScalazJson4sUtils /** @@ -48,9 +32,9 @@ object CampaignAttributionEnrichment extends ParseableEnrichment { val supportedSchema = SchemaCriterion("com.snowplowanalytics.snowplow", "campaign_attribution", "jsonschema", 1, 0) val DefaultNetworkMap = Map( - "gclid" -> "Google", + "gclid" -> "Google", "msclkid" -> "Microsoft", - "dclid" -> "DoubleClick" + "dclid" -> "DoubleClick" ) /** @@ -64,10 +48,10 @@ object CampaignAttributionEnrichment extends ParseableEnrichment { def parse(config: JValue, schemaKey: SchemaKey): ValidatedNelMessage[CampaignAttributionEnrichment] = isParseable(config, schemaKey).flatMap(conf => { (for { - medium <- ScalazJson4sUtils.extract[List[String]](config, "parameters", "fields", "mktMedium") - source <- ScalazJson4sUtils.extract[List[String]](config, "parameters", "fields", "mktSource") - term <- ScalazJson4sUtils.extract[List[String]](config, "parameters", "fields", "mktTerm") - content <- ScalazJson4sUtils.extract[List[String]](config, "parameters", "fields", "mktContent") + medium <- ScalazJson4sUtils.extract[List[String]](config, "parameters", "fields", "mktMedium") + source <- ScalazJson4sUtils.extract[List[String]](config, "parameters", "fields", "mktSource") + term <- ScalazJson4sUtils.extract[List[String]](config, "parameters", "fields", "mktTerm") + content <- ScalazJson4sUtils.extract[List[String]](config, "parameters", "fields", "mktContent") campaign <- ScalazJson4sUtils.extract[List[String]](config, "parameters", "fields", "mktCampaign") customClickMap = ScalazJson4sUtils @@ -78,12 +62,13 @@ object CampaignAttributionEnrichment extends ParseableEnrichment { s => s ) - enrich = CampaignAttributionEnrichment(medium, - source, - term, - content, - campaign, - (DefaultNetworkMap ++ customClickMap).toList) + enrich = CampaignAttributionEnrichment( + medium, + source, + term, + content, + campaign, + (DefaultNetworkMap ++ customClickMap).toList) } yield enrich).toValidationNel }) @@ -152,10 +137,10 @@ case class CampaignAttributionEnrichment( * Validation */ def extractMarketingFields(nvPairs: SourceMap): ValidationNel[String, MarketingCampaign] = { - val medium = getFirstParameter(mediumParameters, nvPairs) - val source = getFirstParameter(sourceParameters, nvPairs) - val term = getFirstParameter(termParameters, nvPairs) - val content = getFirstParameter(contentParameters, nvPairs) + val medium = getFirstParameter(mediumParameters, nvPairs) + val source = getFirstParameter(sourceParameters, nvPairs) + val term = getFirstParameter(termParameters, nvPairs) + val content = getFirstParameter(contentParameters, nvPairs) val campaign = getFirstParameter(campaignParameters, nvPairs) val (clickId, network) = diff --git 
a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/CookieExtractorEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/CookieExtractorEnrichment.scala index bb9afde2a..5767ddd19 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/CookieExtractorEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/CookieExtractorEnrichment.scala @@ -10,30 +10,16 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion - -// Scalaz +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} +import org.apache.http.message.BasicHeaderValueParser import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.JsonDSL._ -// Iglu -import iglu.client.{SchemaCriterion, SchemaKey} -// HttpClient -import org.apache.http.message.BasicHeaderValueParser - -// This project import utils.ScalazJson4sUtils object CookieExtractorEnrichmentConfig extends ParseableEnrichment { @@ -88,7 +74,7 @@ case class CookieExtractorEnrichment( cookies.map { cookie => (("schema" -> "iglu:org.ietf/http_cookie/jsonschema/1-0-0") ~ ("data" -> - ("name" -> cookie.getName) ~ + ("name" -> cookie.getName) ~ ("value" -> cookie.getValue))) } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/CurrencyConversionEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/CurrencyConversionEnrichment.scala index 9fbf92613..a9d468e21 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/CurrencyConversionEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/CurrencyConversionEnrichment.scala @@ -10,49 +10,22 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry -// Java import java.net.UnknownHostException -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion - -// Scala import scala.util.control.NonFatal -// Scalaz +import com.snowplowanalytics.forex.oerclient._ +import com.snowplowanalytics.forex.{Forex, ForexConfig} +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ +import org.joda.time.DateTime +import org.json4s.{DefaultFormats, JValue} import scalaz._ import Scalaz._ -// json4s -import org.json4s.{DefaultFormats, JValue} - -// Iglu -import iglu.client.{SchemaCriterion, SchemaKey} -import iglu.client.validation.ProcessingMessageMethods._ - -// Joda-Time -import org.joda.time.DateTime - -// Scala-Forex -import com.snowplowanalytics.forex.oerclient.{ - AccountType, - DeveloperAccount, - EnterpriseAccount, - OerClientConfig, - OerResponseError, - UnlimitedAccount -} -import com.snowplowanalytics.forex.{Forex, ForexConfig} - -// This project -import common.utils.ConversionUtils import utils.ScalazJson4sUtils /** @@ -70,12 +43,12 @@ object CurrencyConversionEnrichmentConfig extends ParseableEnrichment { def parse(config: JValue, schemaKey: SchemaKey): ValidatedNelMessage[CurrencyConversionEnrichment] = isParseable(config, schemaKey).flatMap(conf => { (for { - apiKey <- ScalazJson4sUtils.extract[String](config, "parameters", "apiKey") + apiKey <- ScalazJson4sUtils.extract[String](config, "parameters", "apiKey") baseCurrency <- ScalazJson4sUtils.extract[String](config, "parameters", "baseCurrency") accountType <- (ScalazJson4sUtils.extract[String](config, "parameters", "accountType") match { - case Success("DEVELOPER") => DeveloperAccount.success + case Success("DEVELOPER") => DeveloperAccount.success case Success("ENTERPRISE") => EnterpriseAccount.success - case Success("UNLIMITED") => UnlimitedAccount.success + case Success("UNLIMITED") => UnlimitedAccount.success // Should never happen (prevented by schema validation) case Success(s) => @@ -108,9 +81,10 @@ case class CurrencyConversionEnrichment(accountType: AccountType, apiKey: String * @return None.success if the inputs were not both defined, * otherwise Validation[Option[_]] boxing the result of the conversion */ - private def performConversion(initialCurrency: Option[String], - value: Option[Double], - tstamp: DateTime): Validation[String, Option[String]] = + private def performConversion( + initialCurrency: Option[String], + value: Option[Double], + tstamp: DateTime): Validation[String, Option[String]] = (initialCurrency, value) match { case (Some(ic), Some(v)) => fx.convert(v, ic).to(baseCurrency).at(tstamp) match { @@ -135,28 +109,29 @@ case class CurrencyConversionEnrichment(accountType: AccountType, apiKey: String * @param collectorTstamp Collector timestamp * @return Validation[Tuple] containing all input amounts converted to the base currency */ - def convertCurrencies(trCurrency: Option[String], - trTotal: Option[Double], - trTax: Option[Double], - trShipping: Option[Double], - tiCurrency: Option[String], - tiPrice: Option[Double], - collectorTstamp: Option[DateTime]) + def convertCurrencies( + trCurrency: Option[String], + trTotal: Option[Double], + trTax: Option[Double], + trShipping: Option[Double], + tiCurrency: Option[String], + tiPrice: 
Option[Double], + collectorTstamp: Option[DateTime]) : ValidationNel[String, (Option[String], Option[String], Option[String], Option[String])] = collectorTstamp match { case Some(tstamp) => try { val newCurrencyTr = performConversion(trCurrency, trTotal, tstamp) val newCurrencyTi = performConversion(tiCurrency, tiPrice, tstamp) - val newTrTax = performConversion(trCurrency, trTax, tstamp) + val newTrTax = performConversion(trCurrency, trTax, tstamp) val newTrShipping = performConversion(trCurrency, trShipping, tstamp) (newCurrencyTr.toValidationNel |@| newTrTax.toValidationNel |@| newTrShipping.toValidationNel |@| newCurrencyTi.toValidationNel) { (_, _, _, _) } } catch { case e: NoSuchElementException => "Base currency [%s] not supported: [%s]".format(baseCurrency, e).failNel - case f: UnknownHostException => "Could not connect to Open Exchange Rates: [%s]".format(f).failNel - case NonFatal(g) => "Unexpected exception converting currency: [%s]".format(g).failNel + case f: UnknownHostException => "Could not connect to Open Exchange Rates: [%s]".format(f).failNel + case NonFatal(g) => "Unexpected exception converting currency: [%s]".format(g).failNel } case None => "Collector timestamp missing".failNel // This should never happen } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/EventFingerprintEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/EventFingerprintEnrichment.scala index c891f6401..960dcca08 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/EventFingerprintEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/EventFingerprintEnrichment.scala @@ -10,32 +10,16 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion - -// Apache Commons +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ import org.apache.commons.codec.digest.DigestUtils - -// Scalaz import scalaz._ import Scalaz._ - -// json4s import org.json4s._ -import org.json4s.jackson.JsonMethods - -// Iglu -import iglu.client.{SchemaCriterion, SchemaKey} -import iglu.client.validation.ProcessingMessageMethods._ -// This project import utils.ScalazJson4sUtils /** @@ -60,8 +44,8 @@ object EventFingerprintEnrichmentConfig extends ParseableEnrichment { isParseable(config, schemaKey).flatMap(conf => { (for { excludedParameters <- ScalazJson4sUtils.extract[List[String]](config, "parameters", "excludeParameters") - algorithmName <- ScalazJson4sUtils.extract[String](config, "parameters", "hashAlgorithm") - algorithm <- getAlgorithm(algorithmName) + algorithmName <- ScalazJson4sUtils.extract[String](config, "parameters", "hashAlgorithm") + algorithm <- getAlgorithm(algorithmName) } yield EventFingerprintEnrichment(algorithm, excludedParameters)).toValidationNel }) diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/HttpHeaderExtractorEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/HttpHeaderExtractorEnrichment.scala index 32e4d490f..4283e629b 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/HttpHeaderExtractorEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/HttpHeaderExtractorEnrichment.scala @@ -10,28 +10,13 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion - -// Scalaz -import scalaz._ -import Scalaz._ - -// json4s +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} import org.json4s._ import org.json4s.JsonDSL._ -// Iglu -import iglu.client.{SchemaCriterion, SchemaKey} - -// This project import utils.ScalazJson4sUtils object HttpHeaderExtractorEnrichmentConfig extends ParseableEnrichment { @@ -79,7 +64,7 @@ case class HttpHeaderExtractorEnrichment(headersPattern: String) extends Enrichm httpHeaders.map { header => (("schema" -> "iglu:org.ietf/http_header/jsonschema/1-0-0") ~ ("data" -> - ("name" -> header.name.trim) ~ + ("name" -> header.name.trim) ~ ("value" -> header.value.trim))) } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/IabEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/IabEnrichment.scala index 674a1e741..d29996076 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/IabEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/IabEnrichment.scala @@ -10,39 +10,21 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry -// Java import java.io.File import java.net.{InetAddress, URI, UnknownHostException} -// joda-time +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} +import com.snowplowanalytics.iab.spidersandrobotsclient.IabClient import org.joda.time.DateTime - -// Scala -import scala.util.control.NonFatal - -// Scalaz -import scalaz._ -import Scalaz._ - -// json4s import org.json4s.{DefaultFormats, Extraction, JObject, JValue} import org.json4s.JsonDSL._ +import scalaz._ +import Scalaz._ -// Iglu -import iglu.client.validation.ProcessingMessageMethods._ -import iglu.client.{SchemaCriterion, SchemaKey} - -// IAB client -import iab.spidersandrobotsclient.IabClient - -// This project import utils.{ConversionUtils, ScalazJson4sUtils} /** @@ -53,11 +35,12 @@ object IabEnrichment extends ParseableEnrichment { implicit val formats = DefaultFormats - val supportedSchema = SchemaCriterion("com.snowplowanalytics.snowplow.enrichments", - "iab_spiders_and_robots_enrichment", - "jsonschema", - 1, - 0) + val supportedSchema = SchemaCriterion( + "com.snowplowanalytics.snowplow.enrichments", + "iab_spiders_and_robots_enrichment", + "jsonschema", + 1, + 0) /** * Creates an IabEnrichment instance from a JValue. 
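The cookie and HTTP header extractor hunks above both wrap a raw value in a self-describing context using json4s's JsonDSL. A small standalone sketch of that `("schema" -> ...) ~ ("data" -> ...)` shape; only the schema URI is taken from the diff, while the split-on-first-colon parsing and the sample header are assumptions made for the example.

```scala
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, render}

object HeaderContextSketch {
  // Builds the same ("schema" -> ...) ~ ("data" -> ...) shape as the extractor
  // enrichments above. The naive header parsing is illustrative only.
  def toContext(rawHeader: String): Option[JObject] =
    rawHeader.split(":", 2) match {
      case Array(name, value) =>
        Some(
          ("schema" -> "iglu:org.ietf/http_header/jsonschema/1-0-0") ~
            ("data" ->
              ("name" -> name.trim) ~
                ("value" -> value.trim)))
      case _ => None
    }

  def main(args: Array[String]): Unit =
    println(toContext("X-Forwarded-For: 203.0.113.7").map(ctx => compact(render(ctx))))
}
```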
@@ -93,7 +76,7 @@ object IabEnrichment extends ParseableEnrichment { private def getIabDbFromName(config: JValue, name: String): Option[ValidatedNelMessage[IabDatabase]] = if (ScalazJson4sUtils.fieldExists(config, "parameters", name)) { val uri = ScalazJson4sUtils.extract[String](config, "parameters", name, "uri") - val db = ScalazJson4sUtils.extract[String](config, "parameters", name, "database") + val db = ScalazJson4sUtils.extract[String](config, "parameters", name, "database") (uri.toValidationNel |@| db.toValidationNel) { (uri, db) => getDatabaseUri(uri, db).toValidationNel.map[IabDatabase](u => IabDatabase(name, u, db)) @@ -128,7 +111,7 @@ object IabEnrichment extends ParseableEnrichment { .stringToUri(uri + (if (uri.endsWith("/")) "" else "/") + database) .flatMap(_ match { case Some(u) => u.success - case None => "URI to IAB file must be provided".fail + case None => "URI to IAB file must be provided".fail }) .toProcessingMessage } @@ -150,7 +133,7 @@ case class IabEnrichment( private type DbEntry = Option[(Option[URI], String)] - private val schemaUri = "iglu:com.iab.snowplow/spiders_and_robots/jsonschema/1-0-0" + private val schemaUri = "iglu:com.iab.snowplow/spiders_and_robots/jsonschema/1-0-0" private implicit val formats = DefaultFormats // Construct a Tuple3 of all IAB files @@ -191,15 +174,17 @@ case class IabEnrichment( * IAB list are relevant or outdated * @return an IabResponse object */ - private[enrichments] def performCheck(userAgent: String, - ipAddress: String, - accurateAt: DateTime): Validation[String, IabEnrichmentResponse] = + private[enrichments] def performCheck( + userAgent: String, + ipAddress: String, + accurateAt: DateTime): Validation[String, IabEnrichmentResponse] = try { val result = iabClient.checkAt(userAgent, InetAddress.getByName(ipAddress), accurateAt.toDate) - IabEnrichmentResponse(result.isSpiderOrRobot, - result.getCategory.toString, - result.getReason.toString, - result.getPrimaryImpact.toString).success + IabEnrichmentResponse( + result.isSpiderOrRobot, + result.getCategory.toString, + result.getReason.toString, + result.getPrimaryImpact.toString).success } catch { case exc: UnknownHostException => s"IP address $ipAddress was invald".failure } @@ -212,9 +197,10 @@ case class IabEnrichment( * @param accurateAt enriched event optional datetime * @return IAB response as a self-describing JSON object */ - def getIabContext(userAgent: Option[String], - ipAddress: Option[String], - accurateAt: Option[DateTime]): Validation[String, JObject] = + def getIabContext( + userAgent: Option[String], + ipAddress: Option[String], + accurateAt: Option[DateTime]): Validation[String, JObject] = getIab(userAgent, ipAddress, accurateAt).map(addSchema) /** @@ -225,16 +211,17 @@ case class IabEnrichment( * @param time enriched event optional datetime * @return IAB response as JSON object */ - private def getIab(userAgent: Option[String], - ipAddress: Option[String], - time: Option[DateTime]): Validation[String, JObject] = + private def getIab( + userAgent: Option[String], + ipAddress: Option[String], + time: Option[DateTime]): Validation[String, JObject] = (userAgent, ipAddress, time) match { case (Some(ua), Some(ip), Some(t)) => performCheck(ua, ip, t) match { case Success(response) => Extraction.decompose(response) match { case obj: JObject => obj.success - case _ => s"Couldn't transform IAB response $response into JSON".failure + case _ => s"Couldn't transform IAB response $response into JSON".failure } case Failure(message) => message.failure } @@ -255,10 +242,11 
@@ case class IabEnrichment( /** * Case class copy of `com.snowplowanalytics.iab.spidersandrobotsclient.IabResponse` */ -private[enrichments] case class IabEnrichmentResponse(spiderOrRobot: Boolean, - category: String, - reason: String, - primaryImpact: String) +private[enrichments] case class IabEnrichmentResponse( + spiderOrRobot: Boolean, + category: String, + reason: String, + primaryImpact: String) /** * Case class representing an IAB database location diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/IpLookupsEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/IpLookupsEnrichment.scala index fe057c27f..67f173bb1 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/IpLookupsEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/IpLookupsEnrichment.scala @@ -10,40 +10,20 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry - -// Java -import java.net.URI - -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry -// Scala -import scala.util.control.NonFatal +import java.net.URI -// Scalaz +import com.snowplowanalytics.maxmind.iplookups.IpLookups +import com.snowplowanalytics.maxmind.iplookups.model.IpLookupResult +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ +import org.json4s.{DefaultFormats, JValue} import scalaz._ import Scalaz._ -// json4s -import org.json4s.{DefaultFormats, JValue} - -// Iglu -import iglu.client.{SchemaCriterion, SchemaKey} -import iglu.client.validation.ProcessingMessageMethods._ - -// Scala MaxMind GeoIP -import maxmind.iplookups.IpLookups -import maxmind.iplookups.model.IpLookupResult - -// This project -import common.utils.ConversionUtils -import utils.ScalazJson4sUtils +import utils.{ConversionUtils, ScalazJson4sUtils} /** * Companion object. Lets us create an IpLookupsEnrichment @@ -52,7 +32,7 @@ import utils.ScalazJson4sUtils object IpLookupsEnrichment extends ParseableEnrichment { implicit val formats = DefaultFormats - val supportedSchema = SchemaCriterion("com.snowplowanalytics.snowplow", "ip_lookups", "jsonschema", 2, 0) + val supportedSchema = SchemaCriterion("com.snowplowanalytics.snowplow", "ip_lookups", "jsonschema", 2, 0) /** * Creates an IpLookupsEnrichment instance from a JValue. 
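The getIabContext/getIab/performCheck hunks above only call the IAB client when user agent, IP address and timestamp are all present, and turn an UnknownHostException into a failure message. A simplified sketch of that shape using Either and a stand-in `lookup` function; the real enrichment delegates to IabClient.checkAt, also takes a timestamp (omitted here for brevity), and returns a JSON context rather than a case class.

```scala
import java.net.{InetAddress, UnknownHostException}

object IabCheckSketch {
  // Stand-ins for the IAB client and its response.
  final case class SpiderOrRobot(isSpiderOrRobot: Boolean)

  private def lookup(userAgent: String, ip: InetAddress): SpiderOrRobot =
    SpiderOrRobot(userAgent.toLowerCase.contains("bot")) // placeholder heuristic

  // Same shape as getIab/performCheck above: run the check only when every
  // input is present, and turn an UnknownHostException into an error message.
  def check(userAgent: Option[String], ipAddress: Option[String]): Either[String, SpiderOrRobot] =
    (userAgent, ipAddress) match {
      case (Some(ua), Some(ip)) =>
        try Right(lookup(ua, InetAddress.getByName(ip)))
        catch { case _: UnknownHostException => Left(s"IP address $ip was invalid") }
      case (None, _) => Left("User agent is missing")
      case (_, None) => Left("IP address is missing")
    }

  def main(args: Array[String]): Unit = {
    println(check(Some("Googlebot/2.1 (+http://www.google.com/bot.html)"), Some("203.0.113.7")))
    println(check(None, Some("203.0.113.7")))
  }
}
```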
@@ -86,7 +66,7 @@ object IpLookupsEnrichment extends ParseableEnrichment { private def getArgumentFromName(conf: JValue, name: String): Option[ValidatedNelMessage[(String, URI, String)]] = if (ScalazJson4sUtils.fieldExists(conf, "parameters", name)) { val uri = ScalazJson4sUtils.extract[String](conf, "parameters", name, "uri") - val db = ScalazJson4sUtils.extract[String](conf, "parameters", name, "database") + val db = ScalazJson4sUtils.extract[String](conf, "parameters", name, "database") (uri.toValidationNel |@| db.toValidationNel) { (uri, db) => for { @@ -112,7 +92,7 @@ object IpLookupsEnrichment extends ParseableEnrichment { .stringToUri(uri + "/" + database) .flatMap(_ match { case Some(u) => u.success - case None => "URI to MaxMind file must be provided".fail + case None => "URI to MaxMind file must be provided".fail }) .toProcessingMessage } @@ -138,7 +118,7 @@ case class IpLookupsEnrichment( ) extends Enrichment { private type FinalPath = String - private type DbEntry = Option[(Option[URI], FinalPath)] + private type DbEntry = Option[(Option[URI], FinalPath)] // Construct a Tuple4 of all the IP Lookup databases private val dbs: Tuple4[DbEntry, DbEntry, DbEntry, DbEntry] = { diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/JavascriptScriptEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/JavascriptScriptEnrichment.scala index 20507ee68..46505adce 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/JavascriptScriptEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/JavascriptScriptEnrichment.scala @@ -10,40 +10,21 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry - -// Scripting -import org.mozilla.javascript._ - -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry -// Jackson -import org.codehaus.jackson.JsonParseException - -// Scala import scala.util.control.NonFatal -// Scalaz +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ +import org.mozilla.javascript._ import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.jackson.JsonMethods -// Iglu -import iglu.client.{SchemaCriterion, SchemaKey} -import iglu.client.validation.ProcessingMessageMethods._ - -// This project import outputs.EnrichedEvent -import utils.{ScalazJson4sUtils, ConversionUtils, JsonUtils => JU} +import utils.{ConversionUtils, ScalazJson4sUtils} /** * Lets us create a JavascriptScriptEnrichment from a JValue. 
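The getDatabaseUri/getUri helpers in the IAB, IP lookups and ua-parser hunks above all append a database file name to a base URI and report a missing or unusable URI as an error message. A plain-Scala sketch of that join, with Either standing in for the scalaz Validation used in the codebase; the example base URI and file name are made up.

```scala
import java.net.URI
import scala.util.Try

object DbUriSketch {
  // Joins a base URI and a database file name the way getDatabaseUri/getUri do
  // above, but reports problems as an Either instead of a scalaz Validation.
  def databaseUri(base: String, database: String): Either[String, URI] = {
    val joined = base + (if (base.endsWith("/")) "" else "/") + database
    Try(new URI(joined)).toOption
      .filter(_.getScheme != null)                    // require an absolute URI
      .toRight(s"URI to MaxMind file must be provided, got [$joined]")
  }

  def main(args: Array[String]): Unit = {
    println(databaseUri("http://example.com/third-party/maxmind", "GeoLite2-City.mmdb"))
    println(databaseUri("not a uri", "GeoLite2-City.mmdb")) // Left(...)
  }
}
```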
@@ -66,8 +47,8 @@ object JavascriptScriptEnrichmentConfig extends ParseableEnrichment { def parse(config: JValue, schemaKey: SchemaKey): ValidatedNelMessage[JavascriptScriptEnrichment] = isParseable(config, schemaKey).flatMap(conf => { (for { - encoded <- ScalazJson4sUtils.extract[String](config, "parameters", "script") - raw <- ConversionUtils.decodeBase64Url("script", encoded).toProcessingMessage // TODO: shouldn't be URL-safe + encoded <- ScalazJson4sUtils.extract[String](config, "parameters", "script") + raw <- ConversionUtils.decodeBase64Url("script", encoded).toProcessingMessage // TODO: shouldn't be URL-safe compiled <- JavascriptScriptEnrichment.compile(raw).toProcessingMessage enrich = JavascriptScriptEnrichment(compiled) } yield enrich).toValidationNel @@ -82,8 +63,8 @@ object JavascriptScriptEnrichment { object Variables { private val prefix = "$snowplow31337" // To avoid collisions - val In = s"${prefix}In" - val Out = s"${prefix}Out" + val In = s"${prefix}In" + val Out = s"${prefix}Out" } /** @@ -134,7 +115,7 @@ object JavascriptScriptEnrichment { implicit val formats = DefaultFormats private[registry] def process(script: Script, event: EnrichedEvent): Validation[String, List[JObject]] = { - val cx = Context.enter() + val cx = Context.enter() val scope = cx.initStandardObjects try { @@ -156,7 +137,7 @@ object JavascriptScriptEnrichment { try { JsonMethods.parse(obj.asInstanceOf[String]) match { case JArray(elements) => failFastCast(List[JObject](), elements).success - case _ => s"JavaScript script must return an Array; got [${obj}]".fail + case _ => s"JavaScript script must return an Array; got [${obj}]".fail } } catch { case NonFatal(nf) => diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/RefererParserEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/RefererParserEnrichment.scala index 4ff686f0e..197dc5902 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/RefererParserEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/RefererParserEnrichment.scala @@ -10,38 +10,18 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry -// Java import java.net.URI -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion - -// Scalaz -import scalaz._ -import Scalaz._ - -// json4s -import org.json4s.{DefaultFormats, JValue} - -// Iglu -import iglu.client.{SchemaCriterion, SchemaKey} -import iglu.client.validation.ProcessingMessageMethods._ - -// Snowplow referer-parser +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} import com.snowplowanalytics.refererparser.scala.{Parser => RefererParser} import com.snowplowanalytics.refererparser.scala.Referer +import org.json4s.{DefaultFormats, JValue} +import scalaz._ -// This project import utils.{ConversionUtils => CU} -import utils.MapTransformer -import utils.MapTransformer._ import utils.ScalazJson4sUtils /** diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/UaParserEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/UaParserEnrichment.scala index 721eb9abe..943d92b31 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/UaParserEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/UaParserEnrichment.scala @@ -9,43 +9,26 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry - -// Maven Artifact +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry + import java.io.{FileInputStream, InputStream} import java.net.URI -import com.snowplowanalytics.snowplow.enrich.common.utils.ConversionUtils -import org.apache.maven.artifact.versioning.DefaultArtifactVersion - -// Scala import scala.util.control.NonFatal -// Scalaz +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ import scalaz._ import Scalaz._ - -// ua-parser -import ua_parser.Parser -import ua_parser.Client -import ua_parser.Client - -// json4s import org.json4s._ import org.json4s.DefaultFormats import org.json4s.JValue import org.json4s.JsonDSL._ -import org.json4s.jackson.JsonMethods._ +import ua_parser.Parser +import ua_parser.Client -// Iglu -import iglu.client.{SchemaCriterion, SchemaKey} -import iglu.client.validation.ProcessingMessageMethods._ -import utils.ScalazJson4sUtils +import utils.{ConversionUtils, ScalazJson4sUtils} /** * Companion object. 
Lets us create a UaParserEnrichment @@ -70,8 +53,8 @@ object UaParserEnrichmentConfig extends ParseableEnrichment { private def getCustomRules(conf: JValue): ValidatedMessage[Option[(URI, String)]] = if (ScalazJson4sUtils.fieldExists(conf, "parameters", "uri")) { for { - uri <- ScalazJson4sUtils.extract[String](conf, "parameters", "uri") - db <- ScalazJson4sUtils.extract[String](conf, "parameters", "database") + uri <- ScalazJson4sUtils.extract[String](conf, "parameters", "uri") + db <- ScalazJson4sUtils.extract[String](conf, "parameters", "database") source <- getUri(uri, db) } yield (source, localRulefile).some } else { @@ -83,7 +66,7 @@ object UaParserEnrichmentConfig extends ParseableEnrichment { .stringToUri(uri + (if (uri.endsWith("/")) "" else "/") + database) .flatMap { case Some(u) => u.success - case None => "A valid URI to ua-parser regex file must be provided".fail + case None => "A valid URI to ua-parser regex file must be provided".fail } .toProcessingMessage } @@ -118,7 +101,7 @@ case class UaParserEnrichment(customRulefile: Option[(URI, String)]) extends Enr val parser = for { input <- tryWithCatch(customRulefile.map(f => new FileInputStream(f._2))) - p <- tryWithCatch(constructParser(input)) + p <- tryWithCatch(constructParser(input)) } yield p parser.leftMap(e => s"Failed to initialize ua parser: [${e.getMessage}]") } @@ -191,17 +174,17 @@ case class UaParserEnrichment(customRulefile: Option[(URI, String)]) extends Enr (("schema" -> "iglu:com.snowplowanalytics.snowplow/ua_parser_context/jsonschema/1-0-0") ~ ("data" -> - ("useragentFamily" -> c.userAgent.family) ~ - ("useragentMajor" -> c.userAgent.major) ~ - ("useragentMinor" -> c.userAgent.minor) ~ - ("useragentPatch" -> c.userAgent.patch) ~ + ("useragentFamily" -> c.userAgent.family) ~ + ("useragentMajor" -> c.userAgent.major) ~ + ("useragentMinor" -> c.userAgent.minor) ~ + ("useragentPatch" -> c.userAgent.patch) ~ ("useragentVersion" -> useragentVersion) ~ - ("osFamily" -> c.os.family) ~ - ("osMajor" -> c.os.major) ~ - ("osMinor" -> c.os.minor) ~ - ("osPatch" -> c.os.patch) ~ - ("osPatchMinor" -> c.os.patchMinor) ~ - ("osVersion" -> osVersion) ~ - ("deviceFamily" -> c.device.family))) + ("osFamily" -> c.os.family) ~ + ("osMajor" -> c.os.major) ~ + ("osMinor" -> c.os.minor) ~ + ("osPatch" -> c.os.patch) ~ + ("osPatchMinor" -> c.os.patchMinor) ~ + ("osVersion" -> osVersion) ~ + ("deviceFamily" -> c.device.family))) } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/UserAgentUtilsEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/UserAgentUtilsEnrichment.scala index 0849bfa45..8dad21d4b 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/UserAgentUtilsEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/UserAgentUtilsEnrichment.scala @@ -9,42 +9,22 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion - -// Scala import scala.util.control.NonFatal -// Scalaz -import scalaz._ -import Scalaz._ - -// Logging -import org.slf4j.LoggerFactory - -// UserAgentUtils +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} import eu.bitwalker.useragentutils._ - -// json4s import org.json4s.JValue - -// Iglu -import iglu.client.{SchemaCriterion, SchemaKey} - -// This project -import utils.ScalazJson4sUtils +import org.slf4j.LoggerFactory +import scalaz._ +import Scalaz._ object UserAgentUtilsEnrichmentConfig extends ParseableEnrichment { val supportedSchema = SchemaCriterion("com.snowplowanalytics.snowplow", "user_agent_utils_config", "jsonschema", 1, 0) - private val log = LoggerFactory.getLogger(getClass()) + private val log = LoggerFactory.getLogger(getClass()) // Creates a UserAgentUtilsEnrichment instance from a JValue def parse(config: JValue, schemaKey: SchemaKey): ValidatedNelMessage[UserAgentUtilsEnrichment.type] = { @@ -64,24 +44,26 @@ object UserAgentUtilsEnrichmentConfig extends ParseableEnrichment { * http://stackoverflow.com/questions/17270003/why-are-classes-inside-scala-package-objects-dispreferred */ case class ClientAttributes( - // Browser - browserName: String, - browserFamily: String, - browserVersion: Option[String], - browserType: String, - browserRenderEngine: String, - // OS the browser is running on - osName: String, - osFamily: String, - osManufacturer: String, - // Hardware the OS is running on - deviceType: String, - deviceIsMobile: Boolean) + // Browser + browserName: String, + browserFamily: String, + browserVersion: Option[String], + browserType: String, + browserRenderEngine: String, + // OS the browser is running on + osName: String, + osFamily: String, + osManufacturer: String, + // Hardware the OS is running on + deviceType: String, + deviceIsMobile: Boolean) // Object and a case object with the same name case object UserAgentUtilsEnrichment extends Enrichment { + private val mobileDeviceTypes = Set(DeviceType.MOBILE, DeviceType.TABLET, DeviceType.WEARABLE) + /** * Extracts the client attributes * from a useragent string, using @@ -102,20 +84,20 @@ case object UserAgentUtilsEnrichment extends Enrichment { def extractClientAttributes(useragent: String): Validation[String, ClientAttributes] = try { val ua = UserAgent.parseUserAgentString(useragent) - val b = ua.getBrowser - val v = Option(ua.getBrowserVersion) + val b = ua.getBrowser + val v = Option(ua.getBrowserVersion) val os = ua.getOperatingSystem ClientAttributes( - browserName = b.getName, - browserFamily = b.getGroup.getName, - browserVersion = v map { _.getVersion }, - browserType = b.getBrowserType.getName, + browserName = b.getName, + browserFamily = b.getGroup.getName, + browserVersion = v map { _.getVersion }, + browserType = b.getBrowserType.getName, browserRenderEngine = b.getRenderingEngine.toString, - osName = os.getName, - osFamily = os.getGroup.getName, - osManufacturer = os.getManufacturer.getName, - deviceType = os.getDeviceType.getName, - deviceIsMobile = os.isMobileDevice + osName = os.getName, + osFamily = os.getGroup.getName, + osManufacturer = os.getManufacturer.getName, + deviceType = os.getDeviceType.getName, + deviceIsMobile = mobileDeviceTypes.contains(os.getDeviceType) ).success } 
catch { case NonFatal(e) => "Exception parsing useragent [%s]: [%s]".format(useragent, e.getMessage).fail diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/WeatherEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/WeatherEnrichment.scala index b114df8f9..a329f1d51 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/WeatherEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/WeatherEnrichment.scala @@ -10,46 +10,23 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package enrichments.registry -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion - -// Java import java.lang.{Float => JFloat} -// Scala import scala.util.control.NonFatal -// Scalaz -import scalaz._ -import Scalaz._ - -// Joda time -import org.joda.time.{DateTime, DateTimeZone} - -// json4s -import org.json4s.{DefaultFormats, JObject, JValue} -import org.json4s.Extraction -import org.json4s.JsonDSL._ - -// Iglu -import iglu.client.SchemaKey - -// Scala-Weather +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} import com.snowplowanalytics.weather.providers.openweather.OwmCacheClient import com.snowplowanalytics.weather.providers.openweather.Responses._ +import org.joda.time.{DateTime, DateTimeZone} +import org.json4s.{DefaultFormats, Extraction, JObject, JValue} +import org.json4s.JsonDSL._ +import scalaz._ +import Scalaz._ -// Iglu -import iglu.client.SchemaCriterion - -// This project import utils.ScalazJson4sUtils -import enrichments.EventEnrichments /** * Companion object. 
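The UserAgentUtilsEnrichment hunk above derives deviceIsMobile from an explicit whitelist of DeviceType values instead of os.isMobileDevice; a small sketch of that membership test, assuming only the enum constants named in the hunk:

import eu.bitwalker.useragentutils.DeviceType

// Whitelist from the hunk above; only these DeviceType values count as mobile.
val mobileDeviceTypes: Set[DeviceType] = Set(DeviceType.MOBILE, DeviceType.TABLET, DeviceType.WEARABLE)

def isMobile(deviceType: DeviceType): Boolean = mobileDeviceTypes.contains(deviceType)

isMobile(DeviceType.TABLET)   // true
isMobile(DeviceType.COMPUTER) // false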
Lets us create an WeatherEnrichment instance from a JValue @@ -65,11 +42,11 @@ object WeatherEnrichmentConfig extends ParseableEnrichment { isParseable(config, schemaKey).flatMap { conf => { (for { - apiKey <- ScalazJson4sUtils.extract[String](config, "parameters", "apiKey") - cacheSize <- ScalazJson4sUtils.extract[Int](config, "parameters", "cacheSize") + apiKey <- ScalazJson4sUtils.extract[String](config, "parameters", "apiKey") + cacheSize <- ScalazJson4sUtils.extract[Int](config, "parameters", "cacheSize") geoPrecision <- ScalazJson4sUtils.extract[Int](config, "parameters", "geoPrecision") - apiHost <- ScalazJson4sUtils.extract[String](config, "parameters", "apiHost") - timeout <- ScalazJson4sUtils.extract[Int](config, "parameters", "timeout") + apiHost <- ScalazJson4sUtils.extract[String](config, "parameters", "apiHost") + timeout <- ScalazJson4sUtils.extract[Int](config, "parameters", "timeout") enrich = WeatherEnrichment(apiKey, cacheSize, geoPrecision, apiHost, timeout) } yield enrich).toValidationNel } @@ -107,9 +84,10 @@ case class WeatherEnrichment(apiKey: String, cacheSize: Int, geoPrecision: Int, */ // It accepts Java Float (JFloat) instead of Scala's because it will throw NullPointerException // on conversion step if `EnrichedEvent` has nulls as geo_latitude or geo_longitude - def getWeatherContext(latitude: Option[JFloat], - longitude: Option[JFloat], - time: Option[DateTime]): Validation[String, JObject] = + def getWeatherContext( + latitude: Option[JFloat], + longitude: Option[JFloat], + time: Option[DateTime]): Validation[String, JObject] = try { getWeather(latitude, longitude, time).map(addSchema) } catch { @@ -124,16 +102,17 @@ case class WeatherEnrichment(apiKey: String, cacheSize: Int, geoPrecision: Int, * @param time enriched event optional time * @return weather stamp as JSON object */ - private def getWeather(latitude: Option[JFloat], - longitude: Option[JFloat], - time: Option[DateTime]): Validation[String, JObject] = + private def getWeather( + latitude: Option[JFloat], + longitude: Option[JFloat], + time: Option[DateTime]): Validation[String, JObject] = (latitude, longitude, time) match { case (Some(lat), Some(lon), Some(t)) => getCachedOrRequest(lat, lon, (t.getMillis / 1000).toInt).flatMap { weatherStamp => val transformedWeather = transformWeather(weatherStamp) Extraction.decompose(transformedWeather) match { case obj: JObject => obj.success - case _ => s"Couldn't transform weather object $transformedWeather into JSON".fail // Shouldn't ever happen + case _ => s"Couldn't transform weather object $transformedWeather into JSON".fail // Shouldn't ever happen } } case _ => s"One of required event fields missing. 
latitude: $latitude, longitude: $longitude, tstamp: $time".fail @@ -150,7 +129,7 @@ case class WeatherEnrichment(apiKey: String, cacheSize: Int, geoPrecision: Int, private def getCachedOrRequest(latitude: Float, longitude: Float, timestamp: Int): Validation[String, Weather] = client.getCachedOrRequest(latitude, longitude, timestamp) match { case Right(w) => w.success - case Left(e) => e.toString.failure + case Left(e) => e.toString.failure } /** @@ -180,10 +159,11 @@ case class WeatherEnrichment(apiKey: String, cacheSize: Int, geoPrecision: Int, * Copy of `com.snowplowanalytics.weather.providers.openweather.Responses.Weather` intended to * execute typesafe (as opposed to JSON) transformation */ -private[enrichments] case class TransformedWeather(main: MainInfo, - wind: Wind, - clouds: Clouds, - rain: Option[Rain], - snow: Option[Snow], - weather: List[WeatherCondition], - dt: String) +private[enrichments] case class TransformedWeather( + main: MainInfo, + wind: Wind, + clouds: Clouds, + rain: Option[Rain], + snow: Option[Snow], + weather: List[WeatherCondition], + dt: String) diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/ApiRequestEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/ApiRequestEnrichment.scala index da4f842b2..b42b585a3 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/ApiRequestEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/ApiRequestEnrichment.scala @@ -10,30 +10,21 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich -package common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry package apirequest -// Java import java.util.UUID -// Scalaz import scalaz._ import Scalaz._ - -// json4s +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} import org.json4s.JsonDSL._ import org.json4s._ import org.json4s.jackson.JsonMethods.fromJsonNode -// Iglu -import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} - -// This project -import com.snowplowanalytics.snowplow.enrich.common.outputs.EnrichedEvent -import com.snowplowanalytics.snowplow.enrich.common.utils.ScalazJson4sUtils +import outputs.EnrichedEvent +import utils.ScalazJson4sUtils /** * Lets us create an ApiRequestEnrichmentConfig from a JValue @@ -43,12 +34,13 @@ object ApiRequestEnrichmentConfig extends ParseableEnrichment { implicit val formats = DefaultFormats val supportedSchema = - SchemaCriterion("com.snowplowanalytics.snowplow.enrichments", - "api_request_enrichment_config", - "jsonschema", - 1, - 0, - 0) + SchemaCriterion( + "com.snowplowanalytics.snowplow.enrichments", + "api_request_enrichment_config", + "jsonschema", + 1, + 0, + 0) /** * Creates an ApiRequestEnrichment instance from a JValue. 
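The parse method in the hunk that follows pulls its settings out of nested JSON with ScalazJson4sUtils.extract. A sketch of the same nested lookups with plain json4s, minus the Validation/ProcessingMessage wrapping; the configuration fragment is hypothetical:

import org.json4s._
import org.json4s.jackson.JsonMethods.parse

implicit val formats: Formats = DefaultFormats

// Hypothetical enrichment configuration fragment, for illustration only.
val config: JValue = parse("""{"parameters":{"api":{"http":{"method":"GET"}},"cache":{"size":1000,"ttl":60}}}""")

val cacheSize = (config \ "parameters" \ "cache" \ "size").extract[Int]          // 1000
val method    = (config \ "parameters" \ "api" \ "http" \ "method").extract[String] // "GET"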
@@ -61,10 +53,10 @@ object ApiRequestEnrichmentConfig extends ParseableEnrichment { def parse(config: JValue, schemaKey: SchemaKey): ValidatedNelMessage[ApiRequestEnrichment] = isParseable(config, schemaKey).flatMap(conf => { (for { - inputs <- ScalazJson4sUtils.extract[List[Input]](config, "parameters", "inputs") + inputs <- ScalazJson4sUtils.extract[List[Input]](config, "parameters", "inputs") httpApi <- ScalazJson4sUtils.extract[HttpApi](config, "parameters", "api", "http") outputs <- ScalazJson4sUtils.extract[List[Output]](config, "parameters", "outputs") - cache <- ScalazJson4sUtils.extract[Cache](config, "parameters", "cache") + cache <- ScalazJson4sUtils.extract[Cache](config, "parameters", "cache") } yield ApiRequestEnrichment(inputs, httpApi, outputs, cache)).toValidationNel }) } @@ -83,19 +75,18 @@ case class ApiRequestEnrichment(inputs: List[Input], api: HttpApi, outputs: List * @param derivedContexts derived contexts * @return none if some inputs were missing, validated JSON context if lookup performed */ - def lookup(event: EnrichedEvent, - derivedContexts: List[JObject], - customContexts: JsonSchemaPairs, - unstructEvent: JsonSchemaPairs): ValidationNel[String, List[JObject]] = { - - /** - * Note that [[JsonSchemaPairs]] have specific structure - it is a pair, - * where first element is [[SchemaKey]], second element is JSON Object - * with keys: `data`, `schema` and `hierarchy` and `schema` contains again [[SchemaKey]] - * but as nested JSON object. `schema` and `hierarchy` can be ignored here - */ + def lookup( + event: EnrichedEvent, + derivedContexts: List[JObject], + customContexts: JsonSchemaPairs, + unstructEvent: JsonSchemaPairs): ValidationNel[String, List[JObject]] = { + + // Note that [[JsonSchemaPairs]] have specific structure - it is a pair, + // where first element is [[SchemaKey]], second element is JSON Object + // with keys: `data`, `schema` and `hierarchy` and `schema` contains again [[SchemaKey]] + // but as nested JSON object. 
`schema` and `hierarchy` can be ignored here val jsonCustomContexts = transformRawPairs(customContexts) - val jsonUnstructEvent = transformRawPairs(unstructEvent).headOption + val jsonUnstructEvent = transformRawPairs(unstructEvent).headOption val templateContext = Input.buildTemplateContext(inputs, event, derivedContexts, jsonCustomContexts, jsonUnstructEvent) @@ -113,8 +104,8 @@ case class ApiRequestEnrichment(inputs: List[Input], api: HttpApi, outputs: List private[apirequest] def getOutputs(validInputs: Option[Map[String, String]]): Validation[String, List[JObject]] = { val result = for { templateContext <- validInputs.toList - url <- api.buildUrl(templateContext).toList - output <- outputs + url <- api.buildUrl(templateContext).toList + output <- outputs body = api.buildBody(templateContext) } yield cachedOrRequest(url, body, output).leftMap(_.toString) result.sequenceU @@ -127,9 +118,10 @@ case class ApiRequestEnrichment(inputs: List[Input], api: HttpApi, outputs: List * @param output currently processing output * @return validated JObject, in case of success ready to be attached to derived contexts */ - private[apirequest] def cachedOrRequest(url: String, - body: Option[String], - output: Output): Validation[Throwable, JObject] = { + private[apirequest] def cachedOrRequest( + url: String, + body: Option[String], + output: Output): Validation[Throwable, JObject] = { val key = cacheKey(url, body) val value = cache.get(key) match { case Some(cachedResponse) => cachedResponse @@ -158,7 +150,7 @@ object ApiRequestEnrichment { def transformRawPairs(pairs: JsonSchemaPairs): List[JObject] = pairs.map { case (schema, node) => - val uri = schema.toSchemaUri + val uri = schema.toSchemaUri val data = fromJsonNode(node) ("schema" -> uri) ~ ("data" -> data \ "data") } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Cache.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Cache.scala index f6d6d5c15..ae009eb1b 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Cache.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Cache.scala @@ -10,22 +10,12 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
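The transformRawPairs helper above keeps only the schema URI and the inner `data` of each pair, dropping the `schema` and `hierarchy` nodes. A minimal json4s sketch of that reshaping, with a made-up Iglu URI and payload:

import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, parse, render}

// Made-up self-describing payload of the kind transformRawPairs receives.
val node: JValue = parse("""{"schema":{"vendor":"com.acme"},"data":{"userId":"42"},"hierarchy":{}}""")
val uri          = "iglu:com.acme/user/jsonschema/1-0-0"

// Keep the schema URI and the inner `data`; the nested schema and hierarchy objects are dropped.
val context: JObject = ("schema" -> uri) ~ ("data" -> node \ "data")

compact(render(context)) // {"schema":"iglu:com.acme/user/jsonschema/1-0-0","data":{"userId":"42"}}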
*/ -package com.snowplowanalytics.snowplow -package enrich.common.enrichments -package registry -package apirequest +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry.apirequest -// Scalaz -import scalaz._ - -// json4s +import com.twitter.util.SynchronizedLruMap import org.json4s.JValue - -// JodaTime import org.joda.time.DateTime - -// Twitter utils -import com.twitter.util.SynchronizedLruMap +import scalaz._ /** * Just LRU cache @@ -67,6 +57,7 @@ case class Cache(size: Int, ttl: Int) { def put(key: String, value: Validation[Throwable, JValue]): Unit = { val now = (new DateTime().getMillis / 1000).toInt cache.put(key, (value, now)) + () } /** diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Errors.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Errors.scala index 38d761674..50b148dc9 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Errors.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Errors.scala @@ -10,8 +10,7 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry -package apirequest +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry.apirequest case class ValueNotFoundException(message: String) extends Throwable { override def toString = s"Value not found $message" diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/HttpApi.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/HttpApi.scala index 30fceb153..b81753b3e 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/HttpApi.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/HttpApi.scala @@ -10,25 +10,17 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
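The HttpApi hunk that follows URL-encodes every template-context value and substitutes it into the configured URI; per the Input.scala comments further down, placeholders are keys in double curly braces. A simplified stand-in for that substitution (the real buildUrl additionally returns None unless every placeholder was matched):

import java.net.URLEncoder

def buildUrl(uriTemplate: String, context: Map[String, String]): String =
  context.foldLeft(uriTemplate) {
    case (uri, (key, value)) => uri.replace(s"{{$key}}", URLEncoder.encode(value, "UTF-8"))
  }

buildUrl("http://api.example.com/users/{{user}}", Map("user" -> "John Doe"))
// http://api.example.com/users/John+Doe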
*/ -package com.snowplowanalytics.snowplow.enrich -package common -package enrichments -package registry -package apirequest +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry.apirequest -// Java import java.net.URLEncoder -// json4s import org.json4s.DefaultFormats import org.json4s.jackson.Serialization - -// Scalaz import scalaz._ import Scalaz._ -// This project -import com.snowplowanalytics.snowplow.enrich.common.utils.HttpClient +import utils.HttpClient /** * API client able to make HTTP requests @@ -43,12 +35,12 @@ case class HttpApi(method: String, uri: String, timeout: Int, authentication: Au private val authUser = for { httpBasic <- authentication.httpBasic - user <- httpBasic.username + user <- httpBasic.username } yield user private val authPassword = for { httpBasic <- authentication.httpBasic - password <- httpBasic.password + password <- httpBasic.password } yield password /** @@ -74,7 +66,7 @@ case class HttpApi(method: String, uri: String, timeout: Int, authentication: Au */ private[apirequest] def buildUrl(context: Map[String, String]): Option[String] = { val encodedContext = context.map { case (k, v) => (k, URLEncoder.encode(v, "UTF-8")) } - val url = encodedContext.toList.foldLeft(uri)(replace) + val url = encodedContext.toList.foldLeft(uri)(replace) everythingMatched(url).option(url) } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Input.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Input.scala index 5eec80d14..f0ce0d4e1 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Input.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Input.scala @@ -10,31 +10,23 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow -package enrich.common -package enrichments -package registry -package apirequest +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry.apirequest -// Scala import scala.util.control.NonFatal -// Scalaz import scalaz._ import Scalaz._ - -// Json4s import org.json4s._ import org.json4s.jackson.compactJson -// This project import outputs.EnrichedEvent import utils.JsonPath._ /** * Container for key with one (and only one) of possible input sources * Basically, represents a key for future template context and way to get value - * out of [[EnrichedEvent]], custom context, derived event or unstruct event. + * out of EnrichedEvent, custom context, derived event or unstruct event. 
* * @param key extracted key * @param pojo optional POJO source to take stright from `EnrichedEvent` @@ -56,7 +48,7 @@ case class Input(key: String, pojo: Option[PojoInput], json: Option[JsonInput]) // but it won't give user meaningful error message val validatedJsonPath = json.map(_.jsonPath).map(compileQuery) match { case Some(compiledQuery) => compiledQuery - case None => "No JSON Input with JSONPath was given".failure + case None => "No JSON Input with JSONPath was given".failure } /** @@ -69,7 +61,7 @@ case class Input(key: String, pojo: Option[PojoInput], json: Option[JsonInput]) case Some(pojoInput) => { try { val method = event.getClass.getMethod(pojoInput.field) - val value = Option(method.invoke(event)).map(_.toString) + val value = Option(method.invoke(event)).map(_.toString) value.map(v => Map(key -> Tags.LastVal(v))).successNel } catch { case NonFatal(err) => s"Error accessing POJO input field [$key]: [$err]".failureNel @@ -91,8 +83,8 @@ case class Input(key: String, pojo: Option[PojoInput], json: Option[JsonInput]) case Some(jsonInput) => { val validatedJson = jsonInput.field match { case "derived_contexts" => getBySchemaCriterion(derived, jsonInput.schemaCriterion).successNel - case "contexts" => getBySchemaCriterion(custom, jsonInput.schemaCriterion).successNel - case "unstruct_event" => getBySchemaCriterion(unstruct.toList, jsonInput.schemaCriterion).successNel + case "contexts" => getBySchemaCriterion(custom, jsonInput.schemaCriterion).successNel + case "unstruct_event" => getBySchemaCriterion(unstruct.toList, jsonInput.schemaCriterion).successNel case other => s"Error: wrong field [$other] passed to Input.getFromJson. Should be one of: derived_contexts, contexts, unstruct_event".failureNel } @@ -136,7 +128,7 @@ object Input { * (key inside double curly braces) in template strings * Failure means failure while accessing particular field, like invalid JSONPath, POJO-access, etc * None means any of required fields were not found, so this lookup need to be skipped in future - * Tag used to not merge values on colliding keys ([[Tags.FirstVal]] can be used as well) + * Tag used to not merge values on colliding keys (`Tags.FirstVal` can be used as well) */ type TemplateContext = ValidationNel[String, Option[Map[String, String @@ Tags.LastVal]]] @@ -158,14 +150,15 @@ object Input { * @param unstructEvent optional unstruct event object * @return final template context */ - def buildTemplateContext(inputs: List[Input], - event: EnrichedEvent, - derivedContexts: List[JObject], - customContexts: List[JObject], - unstructEvent: Option[JObject]): TemplateContext = { + def buildTemplateContext( + inputs: List[Input], + event: EnrichedEvent, + derivedContexts: List[JObject], + customContexts: List[JObject], + unstructEvent: Option[JObject]): TemplateContext = { val eventInputs = buildInputsMap(inputs.map(_.getFromEvent(event))) - val jsonInputs = buildInputsMap(inputs.map(_.getFromJson(derivedContexts, customContexts, unstructEvent))) + val jsonInputs = buildInputsMap(inputs.map(_.getFromJson(derivedContexts, customContexts, unstructEvent))) eventInputs |+| jsonInputs } @@ -183,7 +176,7 @@ object Input { val matched = contexts.filter { context => context.obj.exists { case ("schema", JString(schema)) => schema.startsWith(criterion) - case _ => false + case _ => false } } matched.map(_ \ "data").headOption @@ -197,11 +190,11 @@ object Input { */ private def criterionMatch(schemaCriterion: String): Option[String] = schemaCriterion match { - case criterionRegex(schema, "*", _, _) => 
s"$schema".some - case criterionRegex(schema, m, "*", _) => s"$schema$m-".some + case criterionRegex(schema, "*", _, _) => s"$schema".some + case criterionRegex(schema, m, "*", _) => s"$schema$m-".some case criterionRegex(schema, m, rev, "*") => s"$schema$m-$rev-".some case criterionRegex(schema, m, rev, add) => s"$schema$m-$rev-$add".some - case _ => None + case _ => None } /** @@ -229,14 +222,14 @@ object Input { * @return some string best represenging JValue or None if there's no way to stringify it */ private def stringifyJson(json: JValue): Option[String] = json match { - case JString(s) => s.some + case JString(s) => s.some case JArray(array) => array.map(stringifyJson).mkString(",").some - case obj: JObject => compactJson(obj).some - case JInt(i) => i.toString.some - case JDouble(d) => d.toString.some - case JDecimal(d) => d.toString.some - case JBool(b) => b.toString.some - case JNull => "null".some // TODO: or None? - case JNothing => none + case obj: JObject => compactJson(obj).some + case JInt(i) => i.toString.some + case JDouble(d) => d.toString.some + case JDecimal(d) => d.toString.some + case JBool(b) => b.toString.some + case JNull => "null".some // TODO: or None? + case JNothing => none } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Output.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Output.scala index 3d6787e70..3ee489790 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Output.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/apirequest/Output.scala @@ -10,25 +10,17 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow.enrich -package common -package enrichments -package registry -package apirequest +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry.apirequest -// Scalaz -import scalaz._ -import Scalaz._ - -// Scala import scala.util.control.NonFatal -// Json4s +import scalaz._ +import Scalaz._ import org.json4s.{JNothing, JObject, JValue} import org.json4s.JsonDSL._ import org.json4s.jackson.{compactJson, parseJson} -// This project import utils.JsonPath.{query, wrapArray} /** @@ -47,7 +39,7 @@ case class Output(schema: String, json: Option[JsonOutput]) { */ def parse(apiResponse: String): Validation[Throwable, JValue] = json match { case Some(jsonOutput) => jsonOutput.parse(apiResponse) - case output => new InvalidStateException(s"Error: Unknown output [$output]").failure // Cannot happen now + case output => new InvalidStateException(s"Error: Unknown output [$output]").failure // Cannot happen now } /** @@ -58,7 +50,7 @@ case class Output(schema: String, json: Option[JsonOutput]) { */ def extract(value: JValue): Validation[Throwable, JValue] = json match { case Some(jsonOutput) => jsonOutput.extract(value) - case output => new InvalidStateException(s"Error: Unknown output [$output]").failure // Cannot happen now + case output => new InvalidStateException(s"Error: Unknown output [$output]").failure // Cannot happen now } /** @@ -89,11 +81,11 @@ sealed trait ApiOutput[A] { def parse(response: String): Validation[Throwable, A] /** - * Extract value specified by [[path]] and + * Extract value specified by `path` and * transform to context-ready JSON data * * @param response parsed API response - * @return extracted by [[path]] value mapped to JSON + * @return extracted by `path` value mapped to JSON */ def extract(response: A): Validation[Throwable, JValue] @@ -106,7 +98,7 @@ sealed trait ApiOutput[A] { def get(response: String): Validation[Throwable, JValue] = for { validated <- parse(response) - result <- extract(validated) + result <- extract(validated) } yield result } @@ -120,7 +112,7 @@ case class JsonOutput(jsonPath: String) extends ApiOutput[JValue] { val path = jsonPath /** - * Proxy function for [[query]] which wrap missing value in error + * Proxy function for `query` which wrap missing value in error * * @param json JSON value to look in * @return validated found JSON, with absent value treated like failure diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/enrichments.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/enrichments.scala index 7ab971492..102b1283b 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/enrichments.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/enrichments.scala @@ -10,32 +10,17 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry -// Scalaz import java.net.URI +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ +import org.json4s.JValue import scalaz._ import Scalaz._ -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion - -// json4s -import org.json4s.JValue - -// Iglu -import iglu.client.{SchemaCriterion, SchemaKey} -import iglu.client.validation.ProcessingMessageMethods._ - -// This project -import utils.ScalazJson4sUtils - /** * Trait inherited by every enrichment config case class */ diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/Mutators.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/Mutators.scala index 5ba9fe1eb..1179b5468 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/Mutators.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/Mutators.scala @@ -10,9 +10,10 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry.pii +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry.pii -import com.snowplowanalytics.snowplow.enrich.common.outputs.EnrichedEvent +import outputs.EnrichedEvent object Mutators { diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/PiiPseudonymizerEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/PiiPseudonymizerEnrichment.scala index be7f0efdf..fe613b135 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/PiiPseudonymizerEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/PiiPseudonymizerEnrichment.scala @@ -10,16 +10,21 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics -package snowplow.enrich -package common.enrichments.registry +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry package pii -// Scala +import org.apache.commons.codec.digest.DigestUtils + import scala.collection.JavaConverters._ import scala.collection.mutable.MutableList -// Scala libraries +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.{ArrayNode, ObjectNode, TextNode} +import com.jayway.jsonpath.{Configuration, JsonPath => JJsonPath} +import com.jayway.jsonpath.MapFunction +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} import org.json4s import org.json4s.{DefaultFormats, Diff, JValue} import org.json4s.JsonAST._ @@ -28,28 +33,11 @@ import org.json4s.jackson.JsonMethods import org.json4s.jackson.JsonMethods.{compact, parse, render} import org.json4s.jackson.Serialization.write import org.json4s.Extraction.decompose - -// Java -import org.apache.commons.codec.digest.DigestUtils - -// Java libraries -import com.fasterxml.jackson.databind.JsonNode -import com.fasterxml.jackson.databind.node.{ArrayNode, ObjectNode, TextNode} -import com.jayway.jsonpath.{Configuration, JsonPath => JJsonPath} -import com.jayway.jsonpath.MapFunction - -// Scalaz import scalaz._ import Scalaz._ -// Iglu -import iglu.client.validation.ProcessingMessageMethods._ -import iglu.client.{SchemaCriterion, SchemaKey} - -// This project -import common.ValidatedNelMessage -import common.utils.ScalazJson4sUtils.{extract, fieldExists} -import common.outputs.EnrichedEvent +import utils.ScalazJson4sUtils.{extract, fieldExists} +import outputs.EnrichedEvent /** * Companion object. 
Lets us create a PiiPseudonymizerEnrichment @@ -67,21 +55,21 @@ object PiiPseudonymizerEnrichment extends ParseableEnrichment { conf <- matchesSchema(config, schemaKey) emitIdentificationEvent = extract[Boolean](conf, "emitEvent").toOption .getOrElse(false) - piiFields <- extract[List[JObject]](conf, "parameters", "pii").leftMap(_.getMessage) - piiStrategy <- extractStrategy(config) + piiFields <- extract[List[JObject]](conf, "parameters", "pii").leftMap(_.getMessage) + piiStrategy <- extractStrategy(config) piiFieldList <- extractFields(piiFields) } yield PiiPseudonymizerEnrichment(piiFieldList, emitIdentificationEvent, piiStrategy) }.leftMap(_.toProcessingMessageNel) private[pii] def getHashFunction(strategyFunction: String): Validation[String, DigestFunction] = strategyFunction match { - case "MD2" => { DigestUtils.md2Hex(_: Array[Byte]) }.success - case "MD5" => { DigestUtils.md5Hex(_: Array[Byte]) }.success - case "SHA-1" => { DigestUtils.sha1Hex(_: Array[Byte]) }.success + case "MD2" => { DigestUtils.md2Hex(_: Array[Byte]) }.success + case "MD5" => { DigestUtils.md5Hex(_: Array[Byte]) }.success + case "SHA-1" => { DigestUtils.sha1Hex(_: Array[Byte]) }.success case "SHA-256" => { DigestUtils.sha256Hex(_: Array[Byte]) }.success case "SHA-384" => { DigestUtils.sha384Hex(_: Array[Byte]) }.success case "SHA-512" => { DigestUtils.sha512Hex(_: Array[Byte]) }.success - case fName => s"Unknown function $fName".failure + case fName => s"Unknown function $fName".failure } private def extractFields(piiFields: List[JObject]): Validation[String, List[PiiField]] = @@ -142,9 +130,9 @@ object PiiPseudonymizerEnrichment extends ParseableEnrichment { */ final case class PiiStrategyPseudonymize(functionName: String, hashFunction: DigestFunction, salt: String) extends PiiStrategy { - val TextEncoding = "UTF-8" + val TextEncoding = "UTF-8" override def scramble(clearText: String): String = hash(clearText + salt) - def hash(text: String): String = hashFunction(text.getBytes(TextEncoding)) + def hash(text: String): String = hashFunction(text.getBytes(TextEncoding)) } /** @@ -161,9 +149,10 @@ final case class PiiStrategyPseudonymize(functionName: String, hashFunction: Dig * @param emitIdentificationEvent whether to emit an identification event * @param strategy the pseudonymization strategy to use */ -case class PiiPseudonymizerEnrichment(fieldList: List[PiiField], - emitIdentificationEvent: Boolean, - strategy: PiiStrategy) +case class PiiPseudonymizerEnrichment( + fieldList: List[PiiField], + emitIdentificationEvent: Boolean, + strategy: PiiStrategy) extends Enrichment { implicit val json4sFormats = DefaultFormats + new PiiModifiedFieldsSerializer + @@ -225,8 +214,9 @@ final case class PiiJson(fieldMutator: Mutator, schemaCriterion: SchemaCriterion /** * Map context top fields with strategy if they match. 
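getHashFunction above maps the configured strategy name to an Apache Commons Codec digest, and PiiStrategyPseudonymize then hashes the clear text concatenated with the salt. A minimal sketch of the SHA-256 variant, with an invented salt:

import org.apache.commons.codec.digest.DigestUtils

// Invented salt, for illustration only.
val salt = "pepper123"

// hash(clearText + salt), hex-encoded, as PiiStrategyPseudonymize.scramble does.
def pseudonymize(clearText: String): String =
  DigestUtils.sha256Hex((clearText + salt).getBytes("UTF-8"))

pseudonymize("jane.doe@example.com") // deterministic hex digest: same input and salt give the same output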
*/ - private def mapContextTopFields(tuple: (String, json4s.JValue), - strategy: PiiStrategy): (String, (JValue, List[JsonModifiedField])) = tuple match { + private def mapContextTopFields( + tuple: (String, json4s.JValue), + strategy: PiiStrategy): (String, (JValue, List[JsonModifiedField])) = tuple match { case (k: String, contexts: JValue) if k == "data" => (k, contexts match { case JArray(contexts) => @@ -243,20 +233,21 @@ final case class PiiJson(fieldMutator: Mutator, schemaCriterion: SchemaCriterion */ private def getModifiedContext(jv: JValue, strategy: PiiStrategy): (JValue, List[JsonModifiedField]) = jv match { case JObject(context) => modifyObjectIfSchemaMatches(context, strategy) - case x => (x, List.empty[JsonModifiedField]) + case x => (x, List.empty[JsonModifiedField]) } /** * Tests whether the schema for this event matches the schema criterion and if it does modifies it. */ - private def modifyObjectIfSchemaMatches(context: List[(String, json4s.JValue)], - strategy: PiiStrategy): (JObject, List[JsonModifiedField]) = { + private def modifyObjectIfSchemaMatches( + context: List[(String, json4s.JValue)], + strategy: PiiStrategy): (JObject, List[JsonModifiedField]) = { val fieldsObj = context.toMap (for { - schema <- fieldsObj.get("schema") - schemaStr <- schema.extractOpt[String] + schema <- fieldsObj.get("schema") + schemaStr <- schema.extractOpt[String] parsedSchemaMatches <- SchemaKey.parse(schemaStr).map(schemaCriterion.matches).toOption - data <- fieldsObj.get("data") + data <- fieldsObj.get("data") if parsedSchemaMatches updated = jsonPathReplace(data, strategy, schemaStr) } yield (JObject(fieldsObj.updated("schema", schema).updated("data", updated._1).toList), updated._2)) @@ -266,39 +257,41 @@ final case class PiiJson(fieldMutator: Mutator, schemaCriterion: SchemaCriterion /** * Replaces a value in the given context data with the result of applying the strategy that value. 
*/ - private def jsonPathReplace(jValue: JValue, - strategy: PiiStrategy, - schema: String): (JValue, List[JsonModifiedField]) = { - val objectNode = JsonMethods.mapper.valueToTree[ObjectNode](jValue) + private def jsonPathReplace( + jValue: JValue, + strategy: PiiStrategy, + schema: String): (JValue, List[JsonModifiedField]) = { + val objectNode = JsonMethods.mapper.valueToTree[ObjectNode](jValue) val documentContext = JJsonPath.using(JsonPathConf).parse(objectNode) - val modifiedFields = MutableList[JsonModifiedField]() + val modifiedFields = MutableList[JsonModifiedField]() val documentContext2 = documentContext.map( jsonPath, new ScrambleMapFunction(strategy, modifiedFields, fieldMutator.fieldName, jsonPath, schema)) // make sure it is a structure preserving method, see #3636 - val transformedJValue = JsonMethods.fromJsonNode(documentContext.json[JsonNode]()) + val transformedJValue = JsonMethods.fromJsonNode(documentContext.json[JsonNode]()) val Diff(_, erroneouslyAdded, _) = jValue diff transformedJValue - val Diff(_, withoutCruft, _) = erroneouslyAdded diff transformedJValue + val Diff(_, withoutCruft, _) = erroneouslyAdded diff transformedJValue (withoutCruft, modifiedFields.toList) } } -private final class ScrambleMapFunction(strategy: PiiStrategy, - modifiedFields: MutableList[JsonModifiedField], - fieldName: String, - jsonPath: String, - schema: String) +private final class ScrambleMapFunction( + strategy: PiiStrategy, + modifiedFields: MutableList[JsonModifiedField], + fieldName: String, + jsonPath: String, + schema: String) extends MapFunction { override def map(currentValue: AnyRef, configuration: Configuration): AnyRef = currentValue match { case s: String => val newValue = strategy.scramble(s) - val _ = modifiedFields += JsonModifiedField(fieldName, s, newValue, jsonPath, schema) + val _ = modifiedFields += JsonModifiedField(fieldName, s, newValue, jsonPath, schema) newValue case a: ArrayNode => a.elements.asScala.map { case t: TextNode => val originalValue = t.asText() - val newValue = strategy.scramble(originalValue) + val newValue = strategy.scramble(originalValue) modifiedFields += JsonModifiedField(fieldName, originalValue, newValue, jsonPath, schema) newValue case default: AnyRef => default diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/Serializers.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/Serializers.scala index 636317d38..50bf7257c 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/Serializers.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/Serializers.scala @@ -10,17 +10,11 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
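jsonPathReplace above drives the in-place rewrite through Jayway JsonPath's MapFunction hook (ScrambleMapFunction). A small standalone sketch of that mechanism on a plain JSON string, using a fixed replacement instead of strategy.scramble:

import com.jayway.jsonpath.{Configuration, JsonPath, MapFunction}

val json = """{"emailAddress":"user@example.com","nickname":"sp_user"}"""
val ctx  = JsonPath.using(Configuration.defaultConfiguration()).parse(json)

// Rewrite the matched value in place, as ScrambleMapFunction does with the PII strategy.
ctx.map("$.emailAddress", new MapFunction {
  override def map(currentValue: AnyRef, configuration: Configuration): AnyRef = "<pseudonymized>"
})

ctx.jsonString() // {"emailAddress":"<pseudonymized>","nickname":"sp_user"}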
*/ -package com.snowplowanalytics -package snowplow.enrich -package common.enrichments.registry -package pii +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry.pii -// Json4s import org.json4s.JsonDSL._ import org.json4s.Extraction.decompose -import org.json4s.{CustomSerializer, JObject, JString, MappingException} - -// Scalaz +import org.json4s.{CustomSerializer, JObject, MappingException} import scalaz._ import Scalaz._ @@ -59,8 +53,8 @@ private[pii] final class PiiModifiedFieldsSerializer ({ case jo: JObject => implicit val json4sFormats = formats - val fields = (jo \ "data" \ "pii").extract[List[ModifiedField]] - val strategy = (jo \ "data" \ "strategy").extract[PiiStrategy] + val fields = (jo \ "data" \ "pii").extract[List[ModifiedField]] + val strategy = (jo \ "data" \ "strategy").extract[PiiStrategy] PiiModifiedFields(fields, strategy) }, { case pmf: PiiModifiedFields => diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/package.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/package.scala index 4ee847040..86bf743ab 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/package.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/pii/package.scala @@ -13,21 +13,19 @@ package com.snowplowanalytics.snowplow.enrich.common package enrichments.registry -// Scala libraries import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature} import com.jayway.jsonpath.spi.json.JacksonJsonNodeJsonProvider import com.jayway.jsonpath.{Configuration, Option => JOption} -// Snowplow import outputs.EnrichedEvent package object pii { - type DigestFunction = Function1[Array[Byte], String] - type ModifiedFields = List[ModifiedField] + type DigestFunction = Function1[Array[Byte], String] + type ModifiedFields = List[ModifiedField] type ApplyStrategyFn = (String, PiiStrategy) => (String, ModifiedFields) - type MutatorFn = (EnrichedEvent, PiiStrategy, ApplyStrategyFn) => ModifiedFields + type MutatorFn = (EnrichedEvent, PiiStrategy, ApplyStrategyFn) => ModifiedFields - val JsonMutators = Mutators.JsonMutators + val JsonMutators = Mutators.JsonMutators val ScalarMutators = Mutators.ScalarMutators // Configuration for JsonPath @@ -75,11 +73,12 @@ package pii { /** * Case class for capturing JSON field modifications. */ - private[pii] final case class JsonModifiedField(fieldName: String, - originalValue: String, - modifiedValue: String, - jsonPath: String, - schema: String) + private[pii] final case class JsonModifiedField( + fieldName: String, + originalValue: String, + modifiedValue: String, + jsonPath: String, + schema: String) extends ModifiedField /** diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Cache.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Cache.scala index 3ef566c2d..50714be6c 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Cache.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Cache.scala @@ -11,22 +11,14 @@ * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ package com.snowplowanalytics.snowplow.enrich.common -package enrichments.registry -package sqlquery +package enrichments.registry.sqlquery -// Scala import scala.collection.immutable.IntMap -// json4s -import org.json4s.JObject - -// JodaTime -import org.joda.time.DateTime - -// Twitter utils import com.twitter.util.SynchronizedLruMap +import org.joda.time.DateTime +import org.json4s.JObject -// This library import Input.ExtractedValue /** @@ -69,6 +61,7 @@ case class Cache(size: Int, ttl: Int) { def put(key: IntMap[ExtractedValue], value: ThrowableXor[List[JObject]]): Unit = { val now = (new DateTime().getMillis / 1000).toInt cache.put(key, (value, now)) + () } /** diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Db.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Db.scala index e6aec83aa..76cbeaa3d 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Db.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Db.scala @@ -13,16 +13,12 @@ package com.snowplowanalytics.snowplow.enrich.common package enrichments.registry.sqlquery -// Scala -import scala.collection.immutable.IntMap - -// Java import java.sql._ -// json4s +import scala.collection.immutable.IntMap + import org.json4s.MappingException -// This library import Input.ExtractedValue /** @@ -45,7 +41,7 @@ case class Db(postgresql: Option[PostgresqlDb] = None, mysql: Option[MysqlDb] = } /** - * Create [[PreparedStatement]] and fill all its placeholders + * Create PreparedStatement and fill all its placeholders * This function expects `placeholderMap` contains exact same amount of placeholders * as `sql`, otherwise it will result in error downstream * @@ -68,9 +64,7 @@ case class Db(postgresql: Option[PostgresqlDb] = None, mysql: Option[MysqlDb] = def getPlaceholderCount(sql: String): ThrowableXor[Int] = realDb.createEmptyStatement(sql).flatMap(realDb.getPlaceholderCount) - /** - * Execute [[PreparedStatement]] - */ + /** Execute PreparedStatement */ def execute(preparedStatement: PreparedStatement): ThrowableXor[ResultSet] = realDb.execute(preparedStatement) } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Errors.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Errors.scala index f44181ab0..a06177ee7 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Errors.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Errors.scala @@ -10,12 +10,11 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
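Both Cache.put hunks (apirequest and sqlquery) now end with a bare (), presumably so the method keeps an explicit Unit result and the previous entry returned by the underlying map's put is discarded deliberately rather than tripping value-discard warnings. A tiny sketch of the pattern, with a plain mutable map standing in for SynchronizedLruMap:

import scala.collection.mutable

// Plain mutable map standing in for SynchronizedLruMap, for illustration only.
val cache = mutable.Map.empty[String, (String, Int)]

def put(key: String, value: String): Unit = {
  val now = (System.currentTimeMillis / 1000).toInt
  cache.put(key, (value, now)) // returns Option[(String, Int)] - the previous entry, if any
  ()                           // explicit Unit: the discarded Option is intentional
}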
*/ -package com.snowplowanalytics.snowplow.enrich.common -package enrichments.registry.sqlquery +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry.sqlquery sealed trait SqlQueryEnrichmentError extends Throwable { val message: String - override def toString = message + override def toString = message override def getMessage = message } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Input.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Input.scala index 3ad0fa454..bd0fab016 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Input.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Input.scala @@ -13,34 +13,26 @@ package com.snowplowanalytics.snowplow.enrich.common package enrichments.registry.sqlquery -// Scala +import java.sql.PreparedStatement + import scala.collection.immutable.IntMap import scala.util.control.NonFatal -// Java -import java.sql.PreparedStatement - -// JSONPath import io.gatling.jsonpath.JsonPath - -// Scalaz import scalaz._ import Scalaz._ - -// Json4s import org.json4s._ -// This project import utils.JsonPath._ import outputs.EnrichedEvent /** * Container for key with one (and only one) of possible input sources * Basically, represents a key for future template context and way to get value - * out of [[EnrichedEvent]], custom context, derived event or unstruct event. + * out of EnrichedEvent, custom context, derived event or unstruct event. * * @param placeholder extracted key - * @param pojo optional pojo source to take straight from [[EnrichedEvent]] + * @param pojo optional pojo source to take straight from EnrichedEvent * @param json optional JSON source to take from context or unstruct event */ case class Input(placeholder: Int, pojo: Option[PojoInput], json: Option[JsonInput]) { @@ -59,7 +51,7 @@ case class Input(placeholder: Int, pojo: Option[PojoInput], json: Option[JsonInp // but it won't give user meaningful error message val validatedJsonPath = json.map(_.jsonPath).map(compileQuery) match { case Some(compiledQuery) => compiledQuery - case None => "No JSON Input with JSONPath was given".failure + case None => "No JSON Input with JSONPath was given".failure } /** @@ -67,7 +59,7 @@ case class Input(placeholder: Int, pojo: Option[PojoInput], json: Option[JsonInp * * @param event currently enriching event * @return validated pair of placeholder's postition and extracted value ready - * to be set on [[PreparedStatement]] + * to be set on PreparedStatement */ def getFromEvent(event: EnrichedEvent): ValidationNel[Throwable, (Int, Option[ExtractedValue])] = pojo match { case Some(pojoInput) => @@ -94,11 +86,12 @@ case class Input(placeholder: Int, pojo: Option[PojoInput], json: Option[JsonInp * @param custom list of self-describing JObjects representing custom contexts * @param unstruct optional self-describing JObject representing unstruct event * @return validated pair of placeholder's postition and extracted value ready - * to be setted on [[PreparedStatement]] + * to be setted on PreparedStatement */ - def getFromJson(derived: List[JObject], - custom: List[JObject], - unstruct: Option[JObject]): ValidationNel[Throwable, (Int, Option[ExtractedValue])] = + def getFromJson( + derived: List[JObject], + custom: List[JObject], + unstruct: Option[JObject]): ValidationNel[Throwable, 
(Int, Option[ExtractedValue])] = json match { case Some(jsonInput) => jsonInput.extract(derived, custom, unstruct).map(json => (placeholder, json.flatMap(extractFromJson))) @@ -133,13 +126,14 @@ case class JsonInput(field: String, schemaCriterion: String, jsonPath: String) { * failure means fatal error which should abort enrichment * none means not-found value */ - def extract(derived: List[JObject], - custom: List[JObject], - unstruct: Option[JObject]): ValidationNel[Throwable, Option[JValue]] = { + def extract( + derived: List[JObject], + custom: List[JObject], + unstruct: Option[JObject]): ValidationNel[Throwable, Option[JValue]] = { val validatedJson = field match { case "derived_contexts" => getBySchemaCriterion(derived, schemaCriterion).successNel - case "contexts" => getBySchemaCriterion(custom, schemaCriterion).successNel - case "unstruct_event" => getBySchemaCriterion(unstruct.toList, schemaCriterion).successNel + case "contexts" => getBySchemaCriterion(custom, schemaCriterion).successNel + case "unstruct_event" => getBySchemaCriterion(unstruct.toList, schemaCriterion).successNel case other => InvalidInput( s"SQL Query Enrichment: wrong field [$other] passed to Input.getFromJson. " + @@ -148,7 +142,7 @@ case class JsonInput(field: String, schemaCriterion: String, jsonPath: String) { val validatedJsonPath: Validation[Throwable, JsonPath] = compileQuery(jsonPath) match { case Success(compiledQuery) => compiledQuery.success - case Failure(error) => new Exception(error).failure + case Failure(error) => new Exception(error).failure } (validatedJsonPath.toValidationNel |@| validatedJson) { (jsonPath, validJson) => @@ -169,7 +163,7 @@ object Input { "^(iglu:[a-zA-Z0-9-_.]+/[a-zA-Z0-9-_]+/[a-zA-Z0-9-_]+/)([1-9][0-9]*|\\*)-((?:0|[1-9][0-9]*)|\\*)-((?:0|[1-9][0-9]*)|\\*)$".r /** - * Map all properties inside [[EnrichedEvent]] to textual representations of their types + * Map all properties inside EnrichedEvent to textual representations of their types * It is dynamically configured *once*, when job has started */ val eventTypeMap = classOf[EnrichedEvent].getDeclaredFields @@ -178,27 +172,27 @@ object Input { .toMap /** - * Map all textual representations of types of [[EnrichedEvent]] properties - * to corresponding [[StatementPlaceholder]]s + * Map all textual representations of types of EnrichedEvent properties + * to corresponding StatementPlaceholders */ val typeHandlersMap = Map( - "java.lang.String" -> StringPlaceholder, + "java.lang.String" -> StringPlaceholder, "java.lang.Integer" -> IntPlaceholder, - "java.lang.Byte" -> BytePlaceholder, - "java.lang.Float" -> FloatPlaceholder, + "java.lang.Byte" -> BytePlaceholder, + "java.lang.Float" -> FloatPlaceholder, // Just in case - "String" -> StringPlaceholder, - "scala.Int" -> IntPlaceholder, - "scala.Double" -> DoublePlaceholder, + "String" -> StringPlaceholder, + "scala.Int" -> IntPlaceholder, + "scala.Double" -> DoublePlaceholder, "scala.Boolean" -> BooleanPlaceholder ) /** * Value extracted from POJO or JSON - * It is wrapped into [[StatementPlaceholder#Value]], because its real type + * It is wrapped into StatementPlaceholder#Value, because its real type * is unknown in compile time and all we need is its method * `.set(preparedStatement: PreparedStatement, placeholder: Int): Unit` - * to fill [[PreparedStatement]] + * to fill PreparedStatement */ type ExtractedValue = StatementPlaceholder#Value @@ -221,7 +215,7 @@ object Input { val matched = contexts.filter { context => context.obj.exists { case ("schema", JString(schema)) => 
schema.startsWith(criterion) - case _ => false + case _ => false } } matched.map(_ \ "data").headOption @@ -235,16 +229,16 @@ object Input { */ private def criterionMatch(schemaCriterion: String): Option[String] = schemaCriterion match { - case criterionRegex(schema, "*", _, _) => s"$schema".some - case criterionRegex(schema, m, "*", _) => s"$schema$m-".some + case criterionRegex(schema, "*", _, _) => s"$schema".some + case criterionRegex(schema, m, "*", _) => s"$schema$m-".some case criterionRegex(schema, m, rev, "*") => s"$schema$m-$rev-".some case criterionRegex(schema, m, rev, add) => s"$schema$m-$rev-$add".some - case _ => None + case _ => None } /** - * Build [[IntMap]] with all sequental input values - * It returns [[Failure]] if **any** of inputs were extracted with fatal error + * Build IntMap with all sequental input values + * It returns Failure if **any** of inputs were extracted with fatal error * (not-found is not a fatal error) * * @param inputs list of all [[Input]] objects @@ -255,14 +249,15 @@ object Input { * @return IntMap if all input values were extracted without error, * non-empty list of errors otherwise */ - def buildPlaceholderMap(inputs: List[Input], - event: EnrichedEvent, - derivedContexts: List[JObject], - customContexts: List[JObject], - unstructEvent: Option[JObject]): ValidationNel[Throwable, PlaceholderMap] = { + def buildPlaceholderMap( + inputs: List[Input], + event: EnrichedEvent, + derivedContexts: List[JObject], + customContexts: List[JObject], + unstructEvent: Option[JObject]): ValidationNel[Throwable, PlaceholderMap] = { val eventInputs = inputs.map(_.getFromEvent(event)) - val jsonInputs = inputs.map(_.getFromJson(derivedContexts, customContexts, unstructEvent)) + val jsonInputs = inputs.map(_.getFromJson(derivedContexts, customContexts, unstructEvent)) val pairs = (eventInputs ++ jsonInputs).sequenceU .asInstanceOf[ValidationNel[Throwable, List[(Int, Option[ExtractedValue])]]] @@ -271,13 +266,13 @@ object Input { // Fail if some indexes are missing pairs.map(list => IntMap(list: _*)) match { case Success(map) if isConsistent(map) => Some(map).successNel - case Success(map) => None.success - case Failure(err) => err.failure + case Success(map) => None.success + case Failure(err) => err.failure } } /** - * Check if there any gaps in keys of [[IntMap]] (like 1,2,4,5) and keys + * Check if there any gaps in keys of IntMap (like 1,2,4,5) and keys * contain "1", so they fill all placeholders * * @param intMap Map with Ints as keys @@ -293,7 +288,7 @@ object Input { } /** - * Convert list of inputs to [[IntMap]] with placeholder as a key + * Convert list of inputs to IntMap with placeholder as a key * It will throw away inputs with clasing placeholders (which is actually * valid configuration state). 
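The consistency rule described above - placeholder keys must include 1 and contain no gaps - can be sketched in a few lines; this is a sketch of the described behaviour rather than the project's exact implementation (an empty map is treated as trivially consistent here):

import scala.collection.immutable.IntMap

def isConsistent(intMap: IntMap[_]): Boolean = {
  val keys = intMap.keys.toList.sorted
  keys == (1 to keys.size).toList
}

isConsistent(IntMap(1 -> "a", 2 -> "b", 3 -> "c")) // true
isConsistent(IntMap(1 -> "a", 2 -> "b", 4 -> "c")) // false: placeholder 3 is missing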
Used only to check consistency of placeholders */ @@ -309,12 +304,12 @@ object Input { * or None if it is object, array, null or JNothing */ def extractFromJson(json: JValue): Option[ExtractedValue] = json match { - case JString(s) => Some(StringPlaceholder.Value(s)) - case JBool(b) => Some(BooleanPlaceholder.Value(b)) + case JString(s) => Some(StringPlaceholder.Value(s)) + case JBool(b) => Some(BooleanPlaceholder.Value(b)) case JInt(int) if int <= Int.MaxValue && int >= Int.MinValue => Some(IntPlaceholder.Value(int.toInt)) - case JInt(long) => Some(LongPlaceholder.Value(long.toLong)) - case JDouble(d) => Some(DoublePlaceholder.Value(d)) - case _ => None // Objects, Arrays and nulls are invalid ("not-found") values + case JInt(long) => Some(LongPlaceholder.Value(long.toLong)) + case JDouble(d) => Some(DoublePlaceholder.Value(d)) + case _ => None // Objects, Arrays and nulls are invalid ("not-found") values // In API Request Enrichment null is valid value } @@ -322,7 +317,7 @@ object Input { * Get [[StatementPlaceholder]] for specified field * For e.g. "geo_longitude" => [[FloatPlaceholder]] * - * @param field particular property of [[EnrichedEvent]] + * @param field particular property of EnrichedEvent * @return some */ def getFieldType(field: String): Option[StatementPlaceholder] = @@ -330,19 +325,19 @@ object Input { /** * This objects hold a value of some extracted from [[Input]] and - * know how to set this value to [[PreparedStatement]] + * know how to set this value to PreparedStatement */ sealed trait StatementPlaceholder { /** - * This type member represents type of placeholder inside [[PreparedStatement]] + * This type member represents type of placeholder inside PreparedStatement * Known only in runtime */ type PlaceholderType /** - * Closure that accepts [[PreparedStatement]] and returns setter function which - * accepts value (one of allowed types) and its position in [[PreparedStatement]] + * Closure that accepts PreparedStatement and returns setter function which + * accepts value (one of allowed types) and its position in PreparedStatement * * @param preparedStatement statement being mutating * @return setter function closed on prepared statement diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Output.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Output.scala index b1cdff608..140cc24e3 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Output.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Output.scala @@ -13,24 +13,17 @@ package com.snowplowanalytics.snowplow.enrich.common package enrichments.registry.sqlquery -// scala +import java.sql.{ResultSet, ResultSetMetaData} + import scala.collection.mutable.ListBuffer -// scalaz import scalaz._ import Scalaz._ - -// java -import java.sql.{ResultSet, ResultSetMetaData} - -// Json4s +import org.joda.time.DateTime import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods.parseOpt -// Joda -import org.joda.time.DateTime - /** * Container class for output preferences. 
* Describes how to transform data fetched from DB into derived contexts @@ -45,9 +38,9 @@ case class Output(json: JsonOutput, expectedRows: String) { * `expectedRows` object converted from String */ val expectedRowsMode = expectedRows match { - case "EXACTLY_ONE" => ExactlyOne - case "AT_MOST_ONE" => AtMostOne - case "AT_LEAST_ONE" => AtLeastOne + case "EXACTLY_ONE" => ExactlyOne + case "AT_MOST_ONE" => AtMostOne + case "AT_LEAST_ONE" => AtLeastOne case "AT_LEAST_ZERO" => AtLeastZero case other => throw new MappingException(s"SQL Query Enrichment: [$other] is unknown value for expectedRows property") @@ -73,29 +66,29 @@ case class Output(json: JsonOutput, expectedRows: String) { resultSet.close() for { - jsons <- parsedJsons + jsons <- parsedJsons contexts <- envelope(jsons) } yield contexts } /** - * Validate output according to [[expectedRows]] and describe - * (attach Schema URI) to context according to [[json.describes]]. + * Validate output according to expectedRows and describe + * (attach Schema URI) to context according to json.describes. * * @param jsons list of JSON Objects derived from SQL rows (row is always JSON Object) * @return validated list of described JSONs */ def envelope(jsons: List[JObject]): ThrowableXor[List[JObject]] = (describeMode, expectedRowsMode) match { - case (AllRows, AtLeastOne) => AtLeastOne.collect(jsons).map(rows => describe(JArray(rows))).map(List(_)) + case (AllRows, AtLeastOne) => AtLeastOne.collect(jsons).map(rows => describe(JArray(rows))).map(List(_)) case (AllRows, AtLeastZero) => AtLeastZero.collect(jsons).map(rows => describe(JArray(rows))).map(List(_)) - case (AllRows, single) => single.collect(jsons).map(_.headOption.map(describe).toList) - case (EveryRow, any) => any.collect(jsons).map(_.map(describe)) + case (AllRows, single) => single.collect(jsons).map(_.headOption.map(describe).toList) + case (EveryRow, any) => any.collect(jsons).map(_.map(describe)) } /** - * Transform [[ResultSet]] (single row) fetched from DB into a JSON Object - * Each column maps to an Object's key with name transformed by [[json.propertyNames]] - * And value transformed using [[JsonOutput.getValue]] + * Transform ResultSet35 (single row) fetched from DB into a JSON Object + * Each column maps to an Object's key with name transformed by json.propertyNames + * And value transformed using [[JsonOutput#getValue]] * * @param resultSet single column result * @return successful raw JSON Object or throwable in case of error @@ -119,7 +112,7 @@ object Output { * ADT specifying whether the schema is the self-describing schema for all * rows returned by the query, or whether the schema should be attached to * each of the returned rows. 
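[Editor's aside] A hedged sketch of the four expectedRows modes listed above, using Either in place of the enrichment's ThrowableXor; the error strings mirror the ones in the diff.

// EXACTLY_ONE and AT_MOST_ONE reject multi-row results, AT_LEAST_ONE rejects
// empty results, AT_LEAST_ZERO accepts any number of rows.
def checkExpectedRows[A](rows: List[A], mode: String): Either[String, List[A]] = mode match {
  case "EXACTLY_ONE"   => if (rows.size == 1) Right(rows) else Left("exactly one row was expected")
  case "AT_MOST_ONE"   => if (rows.size <= 1) Right(rows) else Left("at most one row was expected")
  case "AT_LEAST_ONE"  => if (rows.nonEmpty)  Right(rows) else Left("at least one row was expected. 0 given instead")
  case "AT_LEAST_ZERO" => Right(rows)
  case other           => Left(s"[$other] is unknown value for expectedRows property")
}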
- * Processing in [[Output.envelope]] + * Processing in [[Output#envelope]] */ sealed trait DescribeMode @@ -161,7 +154,7 @@ object Output { def collect(resultSet: List[JObject]): ThrowableXor[List[JObject]] = resultSet match { case List(one) => List(one).right - case other => InvalidDbResponse(s"SQL Query Enrichment: exactly one row was expected").left + case other => InvalidDbResponse(s"SQL Query Enrichment: exactly one row was expected").left } } @@ -173,8 +166,8 @@ object Output { def collect(resultSet: List[JObject]): ThrowableXor[List[JObject]] = resultSet match { case List(one) => List(one).right - case List() => Nil.right - case other => InvalidDbResponse(s"SQL Query Enrichment: at most one row was expected").left + case List() => Nil.right + case other => InvalidDbResponse(s"SQL Query Enrichment: at most one row was expected").left } } @@ -193,7 +186,7 @@ object Output { case object AtLeastOne extends ExpectedRowsMode { def collect(resultSet: List[JObject]): ThrowableXor[List[JObject]] = resultSet match { - case Nil => InvalidDbResponse(s"SQL Query Enrichment: at least one row was expected. 0 given instead").left + case Nil => InvalidDbResponse(s"SQL Query Enrichment: at least one row was expected. 0 given instead").left case other => other.right } } @@ -209,24 +202,24 @@ case class JsonOutput(schema: String, describes: String, propertyNames: String) import Output._ val describeMode: DescribeMode = describes match { - case "ALL_ROWS" => AllRows + case "ALL_ROWS" => AllRows case "EVERY_ROW" => EveryRow - case p => throw new MappingException(s"Describe [$p] is not allowed") + case p => throw new MappingException(s"Describe [$p] is not allowed") } val propertyNameMode = propertyNames match { - case "AS_IS" => AsIs - case "CAMEL_CASE" => CamelCase + case "AS_IS" => AsIs + case "CAMEL_CASE" => CamelCase case "PASCAL_CASE" => PascalCase - case "SNAKE_CASE" => SnakeCase - case "LOWER_CASE" => LowerCase - case "UPPER_CASE" => UpperCase - case p => throw new MappingException(s"PropertyName [$p] is not allowed") + case "SNAKE_CASE" => SnakeCase + case "LOWER_CASE" => LowerCase + case "UPPER_CASE" => UpperCase + case p => throw new MappingException(s"PropertyName [$p] is not allowed") } /** - * Transform fetched from DB row (as [[ResultSet]]) into JSON object - * All column names are mapped to object keys using [[propertyNames]] + * Transform fetched from DB row (as ResultSet) into JSON object + * All column names are mapped to object keys using propertyNames * * @param resultSet column fetched from DB * @return JSON object as right disjunction in case of success @@ -234,11 +227,11 @@ case class JsonOutput(schema: String, describes: String, propertyNames: String) */ def transform(resultSet: ResultSet): ThrowableXor[JObject] = { val fields = for { - rsMeta <- getMetaData(resultSet).liftM[ListT] - idx <- ListT[ThrowableXor, Int](getColumnCount(rsMeta).map((x: Int) => (1 to x).toList)) + rsMeta <- getMetaData(resultSet).liftM[ListT] + idx <- ListT[ThrowableXor, Int](getColumnCount(rsMeta).map((x: Int) => (1 to x).toList)) colLabel <- getColumnLabel(idx, rsMeta).liftM[ListT] - colType <- getColumnType(idx, rsMeta).liftM[ListT] - value <- getColumnValue(colType, idx, resultSet).liftM[ListT] + colType <- getColumnType(idx, rsMeta).liftM[ListT] + value <- getColumnValue(colType, idx, resultSet).liftM[ListT] } yield propertyNameMode.transform(colLabel) -> value fields.toList.map((x: List[JField]) => JObject(x)) @@ -305,49 +298,49 @@ object JsonOutput { */ val resultsetGetters: Map[String, Object => 
JValue] = Map( "java.lang.Integer" -> ((obj: Object) => JInt(obj.asInstanceOf[Int])), - "java.lang.Long" -> ((obj: Object) => JInt(obj.asInstanceOf[Long])), + "java.lang.Long" -> ((obj: Object) => JInt(obj.asInstanceOf[Long])), "java.lang.Boolean" -> ((obj: Object) => JBool(obj.asInstanceOf[Boolean])), - "java.lang.Double" -> ((obj: Object) => JDouble(obj.asInstanceOf[Double])), - "java.lang.Float" -> ((obj: Object) => JDouble(obj.asInstanceOf[Float].toDouble)), - "java.lang.String" -> ((obj: Object) => JString(obj.asInstanceOf[String])), - "java.sql.Date" -> ((obj: Object) => JString(new DateTime(obj.asInstanceOf[java.sql.Date]).toString)) + "java.lang.Double" -> ((obj: Object) => JDouble(obj.asInstanceOf[Double])), + "java.lang.Float" -> ((obj: Object) => JDouble(obj.asInstanceOf[Float].toDouble)), + "java.lang.String" -> ((obj: Object) => JString(obj.asInstanceOf[String])), + "java.sql.Date" -> ((obj: Object) => JString(new DateTime(obj.asInstanceOf[java.sql.Date]).toString)) ) /** - * Lift failing [[ResultSet#getMetaData]] into scalaz disjunction + * Lift failing ResultSet#getMetaData into scalaz disjunction * with Throwable as left-side */ def getMetaData(rs: ResultSet): ThrowableXor[ResultSetMetaData] = \/ fromTryCatch rs.getMetaData /** - * Lift failing [[ResultSetMetaData#getColumnCount]] into scalaz disjunction + * Lift failing ResultSetMetaData#getColumnCount into scalaz disjunction * with Throwable as left-side */ def getColumnCount(rsMeta: ResultSetMetaData): ThrowableXor[Int] = \/ fromTryCatch rsMeta.getColumnCount /** - * Lift failing [[ResultSetMetaData#getColumnLabel]] into scalaz disjunction + * Lift failing ResultSetMetaData#getColumnLabel into scalaz disjunction * with Throwable as left-side */ def getColumnLabel(column: Int, rsMeta: ResultSetMetaData): ThrowableXor[String] = \/ fromTryCatch rsMeta.getColumnLabel(column) /** - * Lift failing [[ResultSetMetaData#getColumnClassName]] into scalaz disjunction + * Lift failing ResultSetMetaData#getColumnClassName into scalaz disjunction * with Throwable as left-side */ def getColumnType(column: Int, rsMeta: ResultSetMetaData): ThrowableXor[String] = \/ fromTryCatch rsMeta.getColumnClassName(column) /** - * Get value from [[ResultSet]] using column number + * Get value from ResultSet using column number * * @param datatype stringified type representing real type * @param columnIdx column's number in table * @param rs result set fetched from DB - * @return JSON in case of success or [[Throwable]] in case of SQL error + * @return JSON in case of success or Throwable in case of SQL error */ def getColumnValue(datatype: String, columnIdx: Int, rs: ResultSet): ThrowableXor[JValue] = for { @@ -355,11 +348,11 @@ object JsonOutput { } yield value.map(getValue(_, datatype)).getOrElse(JNull) /** - * Transform value from [[AnyRef]] using stringified type hint + * Transform value from AnyRef using stringified type hint * - * @param anyRef [[AnyRef]] extracted from [[ResultSet]] - * @param datatype stringified type representing [[AnyRef]]'s real type - * @return [[AnyRef]] converted to JSON + * @param anyRef AnyRef extracted from ResultSet + * @param datatype stringified type representing AnyRef's real type + * @return AnyRef converted to JSON */ def getValue(anyRef: AnyRef, datatype: String): JValue = if (anyRef == null) JNull @@ -379,7 +372,7 @@ object JsonOutput { val string = obj.toString parseOpt(string) match { case Some(json) => json - case None => JString(string) + case None => JString(string) } } } diff --git 
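[Editor's aside] A simplified sketch of the row-to-JSON conversion that JsonOutput.transform performs, written with plain JDBC calls and without the scalaz lifting shown above; getValue stands for the class-name dispatcher defined in this file.

import java.sql.ResultSet
import org.json4s._

// Walk the current row's columns, look each value up via its class-name hint,
// and build a JSON object keyed by column label.
def rowToJObject(rs: ResultSet, getValue: (AnyRef, String) => JValue): JObject = {
  val meta = rs.getMetaData
  val fields = (1 to meta.getColumnCount).toList.map { i =>
    meta.getColumnLabel(i) -> getValue(rs.getObject(i), meta.getColumnClassName(i))
  }
  JObject(fields)
}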
a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Rdbms.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Rdbms.scala index 2bf541294..cc2b21845 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Rdbms.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/Rdbms.scala @@ -13,13 +13,11 @@ package com.snowplowanalytics.snowplow.enrich.common package enrichments.registry.sqlquery -// Scalaz +import java.sql._ + import scalaz._ import Scalaz._ -// Java -import java.sql._ - /** * Common trait for all Databases * Contains exception-free logic wrapping JDBC to acquire DB-connection @@ -38,7 +36,7 @@ trait Rdbms { val connectionString: String /** - * Cached connection, it persist until it is open. After closing [[getConnection]] + * Cached connection, it persist until it is open. After closing getConnection * will try to reinitilize it */ private[this] var lastConnection: ThrowableXor[Connection] = @@ -49,7 +47,7 @@ trait Rdbms { * acquired successfully * * @return successful connection if it was in cache or initialized or - * [[Throwable]] as failure + * Throwable as failure */ def getConnection: ThrowableXor[Connection] = lastConnection match { case \/-(c) if !c.isClosed => c.right @@ -61,7 +59,7 @@ trait Rdbms { } /** - * Execute filled [[PreparedStatement]] + * Execute filled PreparedStatement */ def execute(preparedStatement: PreparedStatement): ThrowableXor[ResultSet] = try { @@ -71,13 +69,13 @@ trait Rdbms { } /** - * Get amount of placeholders (?-signs) in [[PreparedStatement]] + * Get amount of placeholders (?-signs) in PreparedStatement */ def getPlaceholderCount(preparedStatement: PreparedStatement): ThrowableXor[Int] = \/ fromTryCatch preparedStatement.getParameterMetaData.getParameterCount /** - * Transform SQL-string with placeholders (?-signs) into [[PreparedStatement]] + * Transform SQL-string with placeholders (?-signs) into PreparedStatement */ def createEmptyStatement(sql: String): ThrowableXor[PreparedStatement] = for { connection <- getConnection } yield connection.prepareStatement(sql) diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/SqlQueryEnrichment.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/SqlQueryEnrichment.scala index d1cf643e4..6fc9212ed 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/SqlQueryEnrichment.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/sqlquery/SqlQueryEnrichment.scala @@ -14,25 +14,15 @@ package com.snowplowanalytics.snowplow.enrich.common package enrichments.registry package sqlquery -// Maven Artifact -import org.apache.maven.artifact.versioning.DefaultArtifactVersion - -// Scala import scala.collection.immutable.IntMap -// Scalaz +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods.fromJsonNode -// Iglu -import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} - -// This project import outputs.EnrichedEvent import utils.ScalazJson4sUtils @@ -58,10 +48,10 @@ object 
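[Editor's aside] A rough sketch of the connection-caching rule described in the Rdbms trait above, using Either and plain JDBC instead of ThrowableXor; the initial error value is an assumption, since it is elided in this diff.

import java.sql.{Connection, DriverManager}
import scala.util.control.NonFatal

class CachedConnectionSketch(connectionString: String) {
  // Last acquired connection; reused while it is still open, re-acquired otherwise.
  private var last: Either[Throwable, Connection] =
    Left(new IllegalStateException("Connection hasn't been initialized")) // assumed initial value

  def getConnection: Either[Throwable, Connection] = last match {
    case Right(c) if !c.isClosed => last
    case _ =>
      last = try Right(DriverManager.getConnection(connectionString)) catch { case NonFatal(e) => Left(e) }
      last
  }
}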
SqlQueryEnrichmentConfig extends ParseableEnrichment { isParseable(config, schemaKey).flatMap(conf => { (for { inputs <- ScalazJson4sUtils.extract[List[Input]](config, "parameters", "inputs") - db <- ScalazJson4sUtils.extract[Db](config, "parameters", "database") - query <- ScalazJson4sUtils.extract[Query](config, "parameters", "query") + db <- ScalazJson4sUtils.extract[Db](config, "parameters", "database") + query <- ScalazJson4sUtils.extract[Query](config, "parameters", "query") output <- ScalazJson4sUtils.extract[Output](config, "parameters", "output") - cache <- ScalazJson4sUtils.extract[Cache](config, "parameters", "cache") + cache <- ScalazJson4sUtils.extract[Cache](config, "parameters", "cache") } yield SqlQueryEnrichment(inputs, db, query, output, cache)).toValidationNel }) } @@ -90,7 +80,7 @@ case class SqlQueryEnrichment(inputs: List[Input], db: Db, query: Query, output: ): ValidationNel[String, List[JObject]] = { val jsonCustomContexts = transformRawPairs(customContexts) - val jsonUnstructEvent = transformRawPairs(unstructEvent).headOption + val jsonUnstructEvent = transformRawPairs(unstructEvent).headOption val placeholderMap: Validated[Input.PlaceholderMap] = Input @@ -100,13 +90,13 @@ case class SqlQueryEnrichment(inputs: List[Input], db: Db, query: Query, output: placeholderMap match { case Success(Some(intMap)) => get(intMap).leftMap(_.toString).validation.toValidationNel - case Success(None) => Nil.successNel - case Failure(err) => err.map(_.toString).failure + case Success(None) => Nil.successNel + case Failure(err) => err.map(_.toString).failure } } /** - * Get contexts from [[cache]] or perform query if nothing found + * Get contexts from cache or perform query if nothing found * and put result into cache * * @param intMap IntMap of extracted values @@ -130,14 +120,14 @@ case class SqlQueryEnrichment(inputs: List[Input], db: Db, query: Query, output: */ def query(intMap: IntMap[Input.ExtractedValue]): ThrowableXor[List[JObject]] = for { - sqlQuery <- db.createStatement(query.sql, intMap) + sqlQuery <- db.createStatement(query.sql, intMap) resultSet <- db.execute(sqlQuery) - context <- output.convert(resultSet) + context <- output.convert(resultSet) } yield context /** * Transform [[Input.PlaceholderMap]] to None if not enough input values were extracted - * This prevents [[db]] from start building a statement while not failing event enrichment + * This prevents db from start building a statement while not failing event enrichment * * @param placeholderMap some IntMap with extracted values or None if it is known * already that not all values were extracted @@ -147,19 +137,19 @@ case class SqlQueryEnrichment(inputs: List[Input], db: Db, query: Query, output: getPlaceholderCount.map { placeholderCount => placeholderMap match { case Some(intMap) if intMap.keys.size == placeholderCount => Some(intMap) - case _ => None + case _ => None } } /** - * Stored amount of ?-signs in [[query.sql]] + * Stored amount of ?-signs in query.sql * Initialized once */ private var lastPlaceholderCount: Validation[Throwable, Int] = InvalidStateException("SQL Query Enrichment: placeholderCount hasn't been initialized").failure /** - * If [[lastPlaceholderCount]] is successful return it + * If lastPlaceholderCount is successful return it * If it's unsucessfult - try to count save result for future use */ def getPlaceholderCount: ValidationNel[String, Int] = lastPlaceholderCount match { @@ -180,7 +170,7 @@ object SqlQueryEnrichment { /** * Transform pairs of schema and node obtained from 
[[utils.shredder.Shredder]] - * into list of regular self-describing [[JObject]] representing custom context + * into list of regular self-describing JObject representing custom context * or unstructured event. * If node isn't Self-describing (doesn't contain data key) * it will be filtered out. @@ -191,11 +181,11 @@ object SqlQueryEnrichment { def transformRawPairs(pairs: JsonSchemaPairs): List[JObject] = pairs.flatMap { case (schema, node) => - val uri = schema.toSchemaUri + val uri = schema.toSchemaUri val data = fromJsonNode(node) data \ "data" match { case JNothing => Nil - case json => (("schema" -> uri) ~ ("data" -> json): JObject) :: Nil + case json => (("schema" -> uri) ~ ("data" -> json): JObject) :: Nil } } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/web/PageEnrichments.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/web/PageEnrichments.scala index 992cccb23..201e88036 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/web/PageEnrichments.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/web/PageEnrichments.scala @@ -14,14 +14,11 @@ package com.snowplowanalytics.snowplow.enrich.common package enrichments package web -// Java import java.net.URI -// Scalaz import scalaz._ import Scalaz._ -// This project import utils.{ConversionUtils => CU} /** @@ -52,10 +49,10 @@ object PageEnrichments { */ def extractPageUri(fromReferer: Option[String], fromTracker: Option[String]): Validation[String, Option[URI]] = (fromReferer, fromTracker) match { - case (Some(r), None) => CU.stringToUri(r) - case (None, Some(t)) => CU.stringToUri(t) + case (Some(r), None) => CU.stringToUri(r) + case (None, Some(t)) => CU.stringToUri(t) case (Some(r), Some(t)) => CU.stringToUri(t) // Tracker URL takes precedence - case (None, None) => None.success // No page URI available. Not a failable offence + case (None, None) => None.success // No page URI available. 
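[Editor's aside] The pair transformation shown in transformRawPairs above can be illustrated with plain json4s values: only nodes carrying a "data" member survive, and they are rebuilt as self-describing objects.

import org.json4s._
import org.json4s.JsonDSL._

// Rebuild (schema URI, JSON node) pairs as {"schema": ..., "data": ...} objects,
// dropping nodes that have no "data" member.
def toSelfDescribing(pairs: List[(String, JValue)]): List[JObject] =
  pairs.flatMap { case (uri, node) =>
    node \ "data" match {
      case JNothing => Nil
      case data     => (("schema" -> uri) ~ ("data" -> data): JObject) :: Nil
    }
  }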
Not a failable offence } /** @@ -74,7 +71,7 @@ object PageEnrichments { val duid = CU.makeTsvSafe(crossDomainElements(0)).some val tstamp = crossDomainElements.lift(1) match { case Some(spDtm) => EventEnrichments.extractTimestamp("sp_dtm", spDtm).map(_.some) - case None => None.success + case None => None.success } tstamp.map(duid -> _) diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/CljTomcatLoader.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/CljTomcatLoader.scala index 49cdf6e59..22a362867 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/CljTomcatLoader.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/CljTomcatLoader.scala @@ -13,11 +13,11 @@ package com.snowplowanalytics.snowplow.enrich.common package loaders -// Scalaz +import java.nio.charset.StandardCharsets.UTF_8 + import scalaz._ import Scalaz._ -// This project import utils.ConversionUtils /** @@ -30,7 +30,7 @@ import utils.ConversionUtils object CljTomcatLoader extends Loader[String] { // The encoding used on these logs - private val CollectorEncoding = "UTF-8" + private val CollectorEncoding = UTF_8 // The name of this collector private val CollectorName = "clj-tomcat" @@ -39,27 +39,27 @@ object CljTomcatLoader extends Loader[String] { // Adapted and evolved from the Clojure Collector's // regular expression private val CljTomcatRegex = { - val w = "[\\s]+" // Whitespace regex + val w = "[\\s]+" // Whitespace regex val ow = "(?:" + w // Non-capturing optional whitespace begins // Our regex follows. Try debuggex.com if it doesn't make sense ("^([\\S]+)" + // Date / date - w + "([\\S]+)" + // Time / time - w + "(-)" + // - / x-edge-location added for consistency with CloudFront - w + "([\\S]+)" + // BytesSent / sc-bytes - w + "([\\S]+)" + // IPAddress / c-ip - w + "([\\S]+)" + // Operation / cs-method - w + "([\\S]+)" + // Domain / cs(Host) - w + "([\\S]+)" + // Object / cs-uri-stem - w + "([\\S]+)" + // HttpStatus / sc-status - w + "([\\S]+)" + // Referer / cs(Referer) - w + "([\\S]+)" + // UserAgent / cs(User Agent) - w + "([\\S]+)" + // Querystring / cs-uri-query - ow + "-" + // - / cs(Cookie) added for consistency with CloudFront - w + "-" + // - / x-edge-result-type added for consistency with CloudFront - w + "-)?" + // - / x-edge-request-id added for consistency with CloudFront - ow + "([\\S]+)?" + // ContentType / POST support - w + "([\\S]+)?)?$").r // PostBody / POST support + w + "([\\S]+)" + // Time / time + w + "(-)" + // - / x-edge-location added for consistency with CloudFront + w + "([\\S]+)" + // BytesSent / sc-bytes + w + "([\\S]+)" + // IPAddress / c-ip + w + "([\\S]+)" + // Operation / cs-method + w + "([\\S]+)" + // Domain / cs(Host) + w + "([\\S]+)" + // Object / cs-uri-stem + w + "([\\S]+)" + // HttpStatus / sc-status + w + "([\\S]+)" + // Referer / cs(Referer) + w + "([\\S]+)" + // UserAgent / cs(User Agent) + w + "([\\S]+)" + // Querystring / cs-uri-query + ow + "-" + // - / cs(Cookie) added for consistency with CloudFront + w + "-" + // - / x-edge-result-type added for consistency with CloudFront + w + "-)?" + // - / x-edge-request-id added for consistency with CloudFront + ow + "([\\S]+)?" 
+ // ContentType / POST support + w + "([\\S]+)?)?$").r // PostBody / POST support } /** @@ -74,17 +74,18 @@ object CljTomcatLoader extends Loader[String] { */ def toCollectorPayload(line: String): ValidatedMaybeCollectorPayload = { - def build(qs: String, - date: String, - time: String, - ip: String, - ua: String, - refr: String, - objct: String, - ct: Option[String], - bdy: Option[String]): ValidatedMaybeCollectorPayload = { + def build( + qs: String, + date: String, + time: String, + ip: String, + ua: String, + refr: String, + objct: String, + ct: Option[String], + bdy: Option[String]): ValidatedMaybeCollectorPayload = { val querystring = parseQuerystring(CloudfrontLoader.toOption(qs), CollectorEncoding) - val timestamp = CloudfrontLoader.toTimestamp(date, time) + val timestamp = CloudfrontLoader.toTimestamp(date, time) val contentType = (for { enc <- ct raw = ConversionUtils.decodeString(CollectorEncoding, "Content type", enc) @@ -100,7 +101,7 @@ object CljTomcatLoader extends Loader[String] { CollectorPayload( q, CollectorName, - CollectorEncoding, + CollectorEncoding.toString, None, // No hostname for CljTomcat Some(t), CloudfrontLoader.toOption(ip), diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/CloudfrontLoader.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/CloudfrontLoader.scala index 60a723d01..5863bdb8e 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/CloudfrontLoader.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/CloudfrontLoader.scala @@ -13,21 +13,16 @@ package com.snowplowanalytics.snowplow.enrich.common package loaders -// Scala +import java.nio.charset.StandardCharsets.UTF_8 + import scala.util.control.NonFatal import scala.util.matching.Regex -// Scalaz -import scalaz._ -import Scalaz._ - -// Apache Commons import org.apache.commons.lang3.StringUtils - -// Joda-Time import org.joda.time.DateTime +import scalaz._ +import Scalaz._ -// This project import utils.ConversionUtils.singleEncodePcts /** @@ -49,7 +44,7 @@ import utils.ConversionUtils.singleEncodePcts object CloudfrontLoader extends Loader[String] { // The encoding used on CloudFront logs - private val CollectorEncoding = "UTF-8" + private val CollectorEncoding = UTF_8 // The name of this collector private val CollectorName = "cloudfront" @@ -89,11 +84,11 @@ object CloudfrontLoader extends Loader[String] { ) private val CfOriginalPlusAdditionalRegex = toRegex(originalFields, additionalFields = true) - private val CfOriginalRegex = toRegex(originalFields) - private val Cf12Sep2012Regex = toRegex(fields12Sep2012) - private val Cf21Oct2013Regex = toRegex(fields21Oct2013) - private val Cf29Apr2014Regex = toRegex(fields29Apr2014) - private val Cf01Jul2014Regex = toRegex(fields01Jul2014, additionalFields = true) + private val CfOriginalRegex = toRegex(originalFields) + private val Cf12Sep2012Regex = toRegex(fields12Sep2012) + private val Cf21Oct2013Regex = toRegex(fields21Oct2013) + private val Cf29Apr2014Regex = toRegex(fields29Apr2014) + private val Cf01Jul2014Regex = toRegex(fields01Jul2014, additionalFields = true) /** * Converts the source string into a @@ -165,8 +160,8 @@ object CloudfrontLoader extends Loader[String] { */ def toOption(field: String): Option[String] = Option(field) match { case Some("-") => None - case Some("") => None - case s => s // Leaves any other Some(x) or None as-is + case Some("") => None + case s => s // 
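[Editor's aside] A small check on the String-to-Charset switch above: StandardCharsets.UTF_8 renders back to the canonical name, which is why CollectorEncoding.toString can still be handed to CollectorPayload where a plain String is expected.

import java.nio.charset.StandardCharsets.UTF_8

// Charset#toString returns the canonical charset name.
assert(UTF_8.toString == "UTF-8")
assert(UTF_8.name == "UTF-8")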
Leaves any other Some(x) or None as-is } /** @@ -192,14 +187,15 @@ object CloudfrontLoader extends Loader[String] { fields.mkString(whitespaceRegex).r } - private case class CloudfrontLogLine(date: String, - time: String, - lastIp: String, - objct: String, - rfr: String, - ua: String, - qs: String, - forwardedFor: String = "-") { + private case class CloudfrontLogLine( + date: String, + time: String, + lastIp: String, + objct: String, + rfr: String, + ua: String, + qs: String, + forwardedFor: String = "-") { def toValidatedMaybeCollectorPayload: ValidatedMaybeCollectorPayload = { // Validations, and let's strip double-encodings @@ -210,10 +206,10 @@ object CloudfrontLoader extends Loader[String] { } // No validation (yet) on the below - val ip = IpAddressExtractor.extractIpAddress(forwardedFor, lastIp) + val ip = IpAddressExtractor.extractIpAddress(forwardedFor, lastIp) val userAgent = singleEncodePcts(ua) - val refr = singleEncodePcts(rfr) - val referer = toOption(refr) map toCleanUri + val refr = singleEncodePcts(rfr) + val referer = toOption(refr) map toCleanUri val api = CollectorApi.parse(objct) @@ -221,7 +217,7 @@ object CloudfrontLoader extends Loader[String] { CollectorPayload( q, CollectorName, - CollectorEncoding, + CollectorEncoding.toString, None, // No hostname for CloudFront Some(t), toOption(ip), diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/IpAddressExtractor.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/IpAddressExtractor.scala index a769fee6d..3e6306689 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/IpAddressExtractor.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/IpAddressExtractor.scala @@ -12,7 +12,6 @@ */ package com.snowplowanalytics.snowplow.enrich.common.loaders -// Scala import scala.annotation.tailrec /** @@ -24,8 +23,8 @@ object IpAddressExtractor { private val ipRegex = """\"?\[?(?:(?:(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).*)|((?:[0-9a-f]|\.|\:+)+).*)\]?\"?""" // 1 group IPv4 and 1 IPv6 private val XForwardedForRegex = s"""^x-forwarded-for: $ipRegex.*""".r - private val ForwardedForRegex = s"""^forwarded: for=$ipRegex.*""".r - private val CloudfrontRegex = s"""^$ipRegex.*""".r + private val ForwardedForRegex = s"""^forwarded: for=$ipRegex.*""".r + private val CloudfrontRegex = s"""^$ipRegex.*""".r /** * If a request has been forwarded, extract the original client IP address; @@ -66,6 +65,6 @@ object IpAddressExtractor { */ def extractIpAddress(xForwardedFor: String, lastIp: String): String = xForwardedFor match { case CloudfrontRegex(ipv4, ipv6) => Option(ipv4).getOrElse(ipv6) - case _ => lastIp + case _ => lastIp } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/Loader.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/Loader.scala index a14ee3f72..1f97aaba8 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/Loader.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/Loader.scala @@ -13,17 +13,13 @@ package com.snowplowanalytics.snowplow.enrich.common package loaders -// Java import java.net.URI +import java.nio.charset.Charset -// Apache URLEncodedUtils -import org.apache.http.client.utils.URLEncodedUtils - -// Scala import scala.util.control.NonFatal import scala.collection.JavaConversions._ -// 
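[Editor's aside] Illustrative calls to the two-argument extractIpAddress overload shown above (the addresses are made up): the first X-Forwarded-For entry is preferred, and a non-IP value such as "-" falls through to the last-hop IP.

import com.snowplowanalytics.snowplow.enrich.common.loaders.IpAddressExtractor

// First entry of the forwarded-for chain wins when it parses as an IP.
val viaProxy = IpAddressExtractor.extractIpAddress("203.0.113.7, 10.0.0.1", "10.0.0.1") // "203.0.113.7"
// "-" does not match the IP regex, so the connection's last IP is kept.
val direct   = IpAddressExtractor.extractIpAddress("-", "10.0.0.1")                     // "10.0.0.1"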
Scalaz +import org.apache.http.client.utils.URLEncodedUtils import scalaz._ import Scalaz._ @@ -33,7 +29,7 @@ import Scalaz._ */ object Loader { - private val TsvRegex = "^tsv/(.*)$".r + private val TsvRegex = "^tsv/(.*)$".r private val NdjsonRegex = "^ndjson/(.*)$".r /** @@ -49,12 +45,12 @@ object Loader { * in a Scalaz Validation */ def getLoader(collectorOrProtocol: String): Validation[String, Loader[_]] = collectorOrProtocol match { - case "cloudfront" => CloudfrontLoader.success - case "clj-tomcat" => CljTomcatLoader.success - case "thrift" => ThriftLoader.success // Finally - a data protocol rather than a piece of software - case TsvRegex(f) => TsvLoader(f).success + case "cloudfront" => CloudfrontLoader.success + case "clj-tomcat" => CljTomcatLoader.success + case "thrift" => ThriftLoader.success // Finally - a data protocol rather than a piece of software + case TsvRegex(f) => TsvLoader(f).success case NdjsonRegex(f) => NdjsonLoader(f).success - case c => "[%s] is not a recognised Snowplow event collector".format(c).fail + case c => "[%s] is not a recognised Snowplow event collector".format(c).fail } } @@ -64,8 +60,6 @@ object Loader { */ abstract class Loader[T] { - import CollectorPayload._ - /** * Converts the source string into a * CanonicalInput. @@ -99,7 +93,7 @@ abstract class Loader[T] { * message, boxed in a Scalaz * Validation */ - protected[loaders] def parseQuerystring(qs: Option[String], enc: String): ValidatedNameValuePairs = qs match { + protected[loaders] def parseQuerystring(qs: Option[String], enc: Charset): ValidatedNameValuePairs = qs match { case Some(q) => { try { URLEncodedUtils.parse(URI.create("http://localhost/?" + q), enc).toList.success diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/NdjsonLoader.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/NdjsonLoader.scala index 11a31b4e3..185e23c2c 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/NdjsonLoader.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/NdjsonLoader.scala @@ -10,25 +10,18 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
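[Editor's aside] Illustrative dispatch through getLoader as defined above (the tsv adapter path is made up): recognised collector names yield a Loader, anything else a Failure string.

import com.snowplowanalytics.snowplow.enrich.common.loaders.Loader

val cloudfront = Loader.getLoader("cloudfront")       // Success(CloudfrontLoader)
val tsv        = Loader.getLoader("tsv/com.acme/v1")  // Success(TsvLoader("com.acme/v1"))
val unknown    = Loader.getLoader("nginx")            // Failure("[nginx] is not a recognised Snowplow event collector")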
*/ -package com.snowplowanalytics.snowplow.enrich.common.loaders +package com.snowplowanalytics.snowplow.enrich.common +package loaders -import com.snowplowanalytics.snowplow.enrich.common.ValidatedMaybeCollectorPayload -import org.joda.time.{DateTime, DateTimeZone} - -// Scalaz +import com.fasterxml.jackson.core.JsonParseException import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.jackson.JsonMethods._ -// Java -import com.fasterxml.jackson.core.JsonParseException - case class NdjsonLoader(adapter: String) extends Loader[String] { - private val CollectorName = "ndjson" + private val CollectorName = "ndjson" private val CollectorEncoding = "UTF-8" /** diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/ThriftLoader.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/ThriftLoader.scala index 933124015..fc13a60fe 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/ThriftLoader.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/ThriftLoader.scala @@ -10,33 +10,22 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow.enrich.common +package com.snowplowanalytics.snowplow.enrich.common package loaders -// Apache URLEncodedUtils -import org.apache.http.NameValuePair - -// Joda-Time -import org.joda.time.{DateTime, DateTimeZone} +import java.nio.charset.Charset -// Thrift -import org.apache.thrift.TDeserializer - -// Java conversions import scala.collection.JavaConversions._ -// Scalaz -import scalaz._ -import Scalaz._ - -// Iglu -import iglu.client.{SchemaCriterion, SchemaKey} - -// Snowplow +import com.snowplowanalytics.iglu.client.{SchemaCriterion, SchemaKey} import com.snowplowanalytics.snowplow.CollectorPayload.thrift.model1.{CollectorPayload => CollectorPayload1} import com.snowplowanalytics.snowplow.SchemaSniffer.thrift.model1.SchemaSniffer import com.snowplowanalytics.snowplow.collectors.thrift.SnowplowRawEvent +import org.apache.http.NameValuePair +import org.apache.thrift.TDeserializer +import org.joda.time.{DateTime, DateTimeZone} +import scalaz._ +import Scalaz._ /** * Loader for Thrift SnowplowRawEvent objects. 
@@ -114,12 +103,12 @@ object ThriftLoader extends Loader[Array[Byte]] { val querystring = parseQuerystring( Option(collectorPayload.querystring), - collectorPayload.encoding + Charset.forName(collectorPayload.encoding) ) - val hostname = Option(collectorPayload.hostname) - val userAgent = Option(collectorPayload.userAgent) - val refererUri = Option(collectorPayload.refererUri) + val hostname = Option(collectorPayload.hostname) + val userAgent = Option(collectorPayload.userAgent) + val refererUri = Option(collectorPayload.refererUri) val networkUserId = Option(collectorPayload.networkUserId) val headers = Option(collectorPayload.headers).map(_.toList).getOrElse(Nil) @@ -127,7 +116,7 @@ object ThriftLoader extends Loader[Array[Byte]] { val ip = IpAddressExtractor.extractIpAddress(headers, collectorPayload.ipAddress).some // Required val api = Option(collectorPayload.path) match { - case None => "Request does not contain a path".fail + case None => "Request does not contain a path".fail case Some(p) => CollectorApi.parse(p) } @@ -175,12 +164,12 @@ object ThriftLoader extends Loader[Array[Byte]] { val querystring = parseQuerystring( Option(snowplowRawEvent.payload.data), - snowplowRawEvent.encoding + Charset.forName(snowplowRawEvent.encoding) ) - val hostname = Option(snowplowRawEvent.hostname) - val userAgent = Option(snowplowRawEvent.userAgent) - val refererUri = Option(snowplowRawEvent.refererUri) + val hostname = Option(snowplowRawEvent.hostname) + val userAgent = Option(snowplowRawEvent.userAgent) + val refererUri = Option(snowplowRawEvent.refererUri) val networkUserId = Option(snowplowRawEvent.networkUserId) val headers = Option(snowplowRawEvent.headers).map(_.toList).getOrElse(Nil) diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/TsvLoader.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/TsvLoader.scala index 68e95d82a..f4b10cffb 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/TsvLoader.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/TsvLoader.scala @@ -13,7 +13,6 @@ package com.snowplowanalytics.snowplow.enrich.common package loaders -// Scalaz import scalaz._ import Scalaz._ diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/collectorPayload.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/collectorPayload.scala index 65f1d5e1b..fcefeb243 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/collectorPayload.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/loaders/collectorPayload.scala @@ -10,18 +10,12 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common -package loaders +package com.snowplowanalytics.snowplow.enrich.common.loaders -// Scalaz -import scalaz._ -import Scalaz._ - -// Apache URLEncodedUtils import org.apache.http.NameValuePair - -// Joda-Time import org.joda.time.DateTime +import scalaz._ +import Scalaz._ object CollectorPayload { @@ -30,27 +24,29 @@ object CollectorPayload { * tp1 (where no API vendor or version provided * as well as Snowplow). 
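[Editor's aside] A note on the wrapping introduced above: the Thrift payload still declares its encoding as a String, so it is converted with Charset.forName before reaching the Charset-typed parseQuerystring. An unrecognised name would throw, e.g.:

import java.nio.charset.Charset

val utf8 = Charset.forName("UTF-8") // ok; the value these payloads normally carry
// Charset.forName("NOT-A-CHARSET") // would throw UnsupportedCharsetException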
*/ - def apply(querystring: List[NameValuePair], - sourceName: String, - sourceEncoding: String, - sourceHostname: Option[String], - contextTimestamp: Option[DateTime], - contextIpAddress: Option[String], - contextUseragent: Option[String], - contextRefererUri: Option[String], - contextHeaders: List[String], - contextUserId: Option[String], - api: CollectorApi, - contentType: Option[String], - body: Option[String]): CollectorPayload = { + def apply( + querystring: List[NameValuePair], + sourceName: String, + sourceEncoding: String, + sourceHostname: Option[String], + contextTimestamp: Option[DateTime], + contextIpAddress: Option[String], + contextUseragent: Option[String], + contextRefererUri: Option[String], + contextHeaders: List[String], + contextUserId: Option[String], + api: CollectorApi, + contentType: Option[String], + body: Option[String]): CollectorPayload = { val source = CollectorSource(sourceName, sourceEncoding, sourceHostname) - val context = CollectorContext(contextTimestamp, - contextIpAddress, - contextUseragent, - contextRefererUri, - contextHeaders, - contextUserId) + val context = CollectorContext( + contextTimestamp, + contextIpAddress, + contextUseragent, + contextRefererUri, + contextHeaders, + contextUserId) CollectorPayload(api, querystring, contentType, body, source, context) } @@ -78,7 +74,7 @@ object CollectorApi { * CollectorApi or a Failure String. */ def parse(path: String): Validation[String, CollectorApi] = path match { - case ApiPathRegex(vnd, ver) => CollectorApi(vnd, ver).success + case ApiPathRegex(vnd, ver) => CollectorApi(vnd, ver).success case _ if isIceRequest(path) => SnowplowTp1.success case _ => s"Request path ${path} does not match (/)vendor/version(/) pattern nor is a legacy /i(ce.png) request".fail diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/outputs/BadRow.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/outputs/BadRow.scala index 2b7352d92..f91862989 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/outputs/BadRow.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/outputs/BadRow.scala @@ -10,29 +10,18 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
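[Editor's aside] Illustrative calls to CollectorApi.parse as shown above (the paths are examples); the exact outcomes depend on ApiPathRegex and isIceRequest, which are not shown in full in this diff.

import com.snowplowanalytics.snowplow.enrich.common.loaders.CollectorApi

// A (/)vendor/version(/) path is parsed into its components.
val tp2 = CollectorApi.parse("/com.snowplowanalytics.snowplow/tp2") // Success(CollectorApi("com.snowplowanalytics.snowplow", "tp2"))
// Anything that is neither vendor/version nor a legacy /i(ce.png) request fails.
val bad = CollectorApi.parse("/health")                             // Failure(...)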
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common -package outputs +package com.snowplowanalytics.snowplow.enrich.common.outputs -// Scalaz +import com.snowplowanalytics.iglu.client.ProcessingMessageNel +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ +import org.joda.time.{DateTime, DateTimeZone} +import org.joda.time.format.DateTimeFormat import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -// Joda-Time -import org.joda.time.{DateTime, DateTimeZone} -import org.joda.time.format.DateTimeFormat - -// Iglu Scala Client -import iglu.client.ProcessingMessageNel -import iglu.client.validation.ProcessingMessageMethods._ - /** * Alternate BadRow constructors */ @@ -58,8 +47,8 @@ object BadRow { */ def oversizedRow(size: Long, errors: NonEmptyList[String], tstamp: Long = System.currentTimeMillis()): String = compact( - ("size" -> size) ~ - ("errors" -> errors.toList.map(e => fromJsonNode(e.toProcessingMessage.asJson))) ~ + ("size" -> size) ~ + ("errors" -> errors.toList.map(e => fromJsonNode(e.toProcessingMessage.asJson))) ~ ("failure_tstamp" -> tstamp) ) } @@ -87,8 +76,8 @@ case class BadRow( * @return the TypeHierarchy as a json4s JValue */ def toJValue: JValue = - ("line" -> line) ~ - ("errors" -> errors.toList.map(e => fromJsonNode(e.asJson))) ~ + ("line" -> line) ~ + ("errors" -> errors.toList.map(e => fromJsonNode(e.asJson))) ~ ("failure_tstamp" -> this.getTimestamp(tstamp)) /** diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/outputs/EnrichedEvent.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/outputs/EnrichedEvent.scala index 91c956a78..c6fe93f22 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/outputs/EnrichedEvent.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/outputs/EnrichedEvent.scala @@ -10,15 +10,12 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common -package outputs +package com.snowplowanalytics.snowplow.enrich.common.outputs -// Java import java.lang.{Integer => JInteger} -import java.lang.{Float => JFloat} -import java.lang.{Byte => JByte} +import java.lang.{Float => JFloat} +import java.lang.{Byte => JByte} -// Scala import scala.beans.BeanProperty /** @@ -39,79 +36,79 @@ import scala.beans.BeanProperty class EnrichedEvent extends Serializable { // The application (site, game, app etc) this event belongs to, and the tracker platform - @BeanProperty var app_id: String = _ + @BeanProperty var app_id: String = _ @BeanProperty var platform: String = _ // Date/time - @BeanProperty var etl_tstamp: String = _ - @BeanProperty var collector_tstamp: String = _ + @BeanProperty var etl_tstamp: String = _ + @BeanProperty var collector_tstamp: String = _ @BeanProperty var dvce_created_tstamp: String = _ // Transaction (i.e. 
this logging event) - @BeanProperty var event: String = _ + @BeanProperty var event: String = _ @BeanProperty var event_id: String = _ - @BeanProperty var txn_id: String = _ + @BeanProperty var txn_id: String = _ // Versioning @BeanProperty var name_tracker: String = _ - @BeanProperty var v_tracker: String = _ - @BeanProperty var v_collector: String = _ - @BeanProperty var v_etl: String = _ + @BeanProperty var v_tracker: String = _ + @BeanProperty var v_collector: String = _ + @BeanProperty var v_etl: String = _ // User and visit - @BeanProperty var user_id: String = _ - @BeanProperty var user_ipaddress: String = _ - @BeanProperty var user_fingerprint: String = _ - @BeanProperty var domain_userid: String = _ + @BeanProperty var user_id: String = _ + @BeanProperty var user_ipaddress: String = _ + @BeanProperty var user_fingerprint: String = _ + @BeanProperty var domain_userid: String = _ @BeanProperty var domain_sessionidx: JInteger = _ - @BeanProperty var network_userid: String = _ + @BeanProperty var network_userid: String = _ // Location - @BeanProperty var geo_country: String = _ - @BeanProperty var geo_region: String = _ - @BeanProperty var geo_city: String = _ - @BeanProperty var geo_zipcode: String = _ - @BeanProperty var geo_latitude: JFloat = _ - @BeanProperty var geo_longitude: JFloat = _ + @BeanProperty var geo_country: String = _ + @BeanProperty var geo_region: String = _ + @BeanProperty var geo_city: String = _ + @BeanProperty var geo_zipcode: String = _ + @BeanProperty var geo_latitude: JFloat = _ + @BeanProperty var geo_longitude: JFloat = _ @BeanProperty var geo_region_name: String = _ // Other IP lookups - @BeanProperty var ip_isp: String = _ + @BeanProperty var ip_isp: String = _ @BeanProperty var ip_organization: String = _ - @BeanProperty var ip_domain: String = _ - @BeanProperty var ip_netspeed: String = _ + @BeanProperty var ip_domain: String = _ + @BeanProperty var ip_netspeed: String = _ // Page - @BeanProperty var page_url: String = _ - @BeanProperty var page_title: String = _ + @BeanProperty var page_url: String = _ + @BeanProperty var page_title: String = _ @BeanProperty var page_referrer: String = _ // Page URL components - @BeanProperty var page_urlscheme: String = _ - @BeanProperty var page_urlhost: String = _ - @BeanProperty var page_urlport: JInteger = _ - @BeanProperty var page_urlpath: String = _ - @BeanProperty var page_urlquery: String = _ + @BeanProperty var page_urlscheme: String = _ + @BeanProperty var page_urlhost: String = _ + @BeanProperty var page_urlport: JInteger = _ + @BeanProperty var page_urlpath: String = _ + @BeanProperty var page_urlquery: String = _ @BeanProperty var page_urlfragment: String = _ // Referrer URL components - @BeanProperty var refr_urlscheme: String = _ - @BeanProperty var refr_urlhost: String = _ - @BeanProperty var refr_urlport: JInteger = _ - @BeanProperty var refr_urlpath: String = _ - @BeanProperty var refr_urlquery: String = _ + @BeanProperty var refr_urlscheme: String = _ + @BeanProperty var refr_urlhost: String = _ + @BeanProperty var refr_urlport: JInteger = _ + @BeanProperty var refr_urlpath: String = _ + @BeanProperty var refr_urlquery: String = _ @BeanProperty var refr_urlfragment: String = _ // Referrer details @BeanProperty var refr_medium: String = _ @BeanProperty var refr_source: String = _ - @BeanProperty var refr_term: String = _ + @BeanProperty var refr_term: String = _ // Marketing - @BeanProperty var mkt_medium: String = _ - @BeanProperty var mkt_source: String = _ - @BeanProperty var mkt_term: String = 
_ - @BeanProperty var mkt_content: String = _ + @BeanProperty var mkt_medium: String = _ + @BeanProperty var mkt_source: String = _ + @BeanProperty var mkt_term: String = _ + @BeanProperty var mkt_content: String = _ @BeanProperty var mkt_campaign: String = _ // Custom Contexts @@ -119,8 +116,8 @@ class EnrichedEvent extends Serializable { // Structured Event @BeanProperty var se_category: String = _ - @BeanProperty var se_action: String = _ - @BeanProperty var se_label: String = _ + @BeanProperty var se_action: String = _ + @BeanProperty var se_label: String = _ @BeanProperty var se_property: String = _ @BeanProperty var se_value : String = _ // Technically should be a Double but may be rendered incorrectly by Cascading with scientific notification (which Redshift can't process) @@ -129,21 +126,21 @@ class EnrichedEvent extends Serializable { @BeanProperty var unstruct_event: String = _ // Ecommerce transaction (from querystring) - @BeanProperty var tr_orderid: String = _ + @BeanProperty var tr_orderid: String = _ @BeanProperty var tr_affiliation: String = _ - @BeanProperty var tr_total: String = _ - @BeanProperty var tr_tax: String = _ - @BeanProperty var tr_shipping: String = _ - @BeanProperty var tr_city: String = _ - @BeanProperty var tr_state: String = _ - @BeanProperty var tr_country: String = _ + @BeanProperty var tr_total: String = _ + @BeanProperty var tr_tax: String = _ + @BeanProperty var tr_shipping: String = _ + @BeanProperty var tr_city: String = _ + @BeanProperty var tr_state: String = _ + @BeanProperty var tr_country: String = _ // Ecommerce transaction item (from querystring) - @BeanProperty var ti_orderid: String = _ - @BeanProperty var ti_sku: String = _ - @BeanProperty var ti_name: String = _ - @BeanProperty var ti_category: String = _ - @BeanProperty var ti_price: String = _ + @BeanProperty var ti_orderid: String = _ + @BeanProperty var ti_sku: String = _ + @BeanProperty var ti_name: String = _ + @BeanProperty var ti_category: String = _ + @BeanProperty var ti_price: String = _ @BeanProperty var ti_quantity: JInteger = _ // Page Pings @@ -156,56 +153,56 @@ class EnrichedEvent extends Serializable { @BeanProperty var useragent: String = _ // Browser (from user-agent) - @BeanProperty var br_name: String = _ - @BeanProperty var br_family: String = _ - @BeanProperty var br_version: String = _ - @BeanProperty var br_type: String = _ + @BeanProperty var br_name: String = _ + @BeanProperty var br_family: String = _ + @BeanProperty var br_version: String = _ + @BeanProperty var br_type: String = _ @BeanProperty var br_renderengine: String = _ // Browser (from querystring) @BeanProperty var br_lang: String = _ // Individual feature fields for non-Hive targets (e.g. 
Infobright) - @BeanProperty var br_features_pdf: JByte = _ - @BeanProperty var br_features_flash: JByte = _ - @BeanProperty var br_features_java: JByte = _ - @BeanProperty var br_features_director: JByte = _ - @BeanProperty var br_features_quicktime: JByte = _ - @BeanProperty var br_features_realplayer: JByte = _ + @BeanProperty var br_features_pdf: JByte = _ + @BeanProperty var br_features_flash: JByte = _ + @BeanProperty var br_features_java: JByte = _ + @BeanProperty var br_features_director: JByte = _ + @BeanProperty var br_features_quicktime: JByte = _ + @BeanProperty var br_features_realplayer: JByte = _ @BeanProperty var br_features_windowsmedia: JByte = _ - @BeanProperty var br_features_gears: JByte = _ - @BeanProperty var br_features_silverlight: JByte = _ - @BeanProperty var br_cookies: JByte = _ - @BeanProperty var br_colordepth: String = _ - @BeanProperty var br_viewwidth: JInteger = _ - @BeanProperty var br_viewheight: JInteger = _ + @BeanProperty var br_features_gears: JByte = _ + @BeanProperty var br_features_silverlight: JByte = _ + @BeanProperty var br_cookies: JByte = _ + @BeanProperty var br_colordepth: String = _ + @BeanProperty var br_viewwidth: JInteger = _ + @BeanProperty var br_viewheight: JInteger = _ // OS (from user-agent) - @BeanProperty var os_name: String = _ - @BeanProperty var os_family: String = _ + @BeanProperty var os_name: String = _ + @BeanProperty var os_family: String = _ @BeanProperty var os_manufacturer: String = _ - @BeanProperty var os_timezone: String = _ + @BeanProperty var os_timezone: String = _ // Device/Hardware (from user-agent) - @BeanProperty var dvce_type: String = _ + @BeanProperty var dvce_type: String = _ @BeanProperty var dvce_ismobile: JByte = _ // Device (from querystring) - @BeanProperty var dvce_screenwidth: JInteger = _ + @BeanProperty var dvce_screenwidth: JInteger = _ @BeanProperty var dvce_screenheight: JInteger = _ // Document - @BeanProperty var doc_charset: String = _ - @BeanProperty var doc_width: JInteger = _ + @BeanProperty var doc_charset: String = _ + @BeanProperty var doc_width: JInteger = _ @BeanProperty var doc_height: JInteger = _ // Currency - @BeanProperty var tr_currency: String = _ - @BeanProperty var tr_total_base: String = _ - @BeanProperty var tr_tax_base: String = _ + @BeanProperty var tr_currency: String = _ + @BeanProperty var tr_total_base: String = _ + @BeanProperty var tr_tax_base: String = _ @BeanProperty var tr_shipping_base: String = _ - @BeanProperty var ti_currency: String = _ - @BeanProperty var ti_price_base: String = _ - @BeanProperty var base_currency: String = _ + @BeanProperty var ti_currency: String = _ + @BeanProperty var ti_price_base: String = _ + @BeanProperty var base_currency: String = _ // Geolocation @BeanProperty var geo_timezone: String = _ @@ -222,7 +219,7 @@ class EnrichedEvent extends Serializable { // Referer @BeanProperty var refr_domain_userid: String = _ - @BeanProperty var refr_dvce_tstamp: String = _ + @BeanProperty var refr_dvce_tstamp: String = _ // Derived contexts @BeanProperty var derived_contexts: String = _ @@ -234,9 +231,9 @@ class EnrichedEvent extends Serializable { @BeanProperty var derived_tstamp: String = _ // Derived event vendor/name/format/version - @BeanProperty var event_vendor: String = _ - @BeanProperty var event_name: String = _ - @BeanProperty var event_format: String = _ + @BeanProperty var event_vendor: String = _ + @BeanProperty var event_name: String = _ + @BeanProperty var event_format: String = _ @BeanProperty var event_version: String = _ // 
Event fingerprint diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/ConversionUtils.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/ConversionUtils.scala index f0e54c941..b5ec1df5b 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/ConversionUtils.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/ConversionUtils.scala @@ -13,36 +13,26 @@ package com.snowplowanalytics.snowplow.enrich.common package utils -// Java -import java.net.URI -import java.net.URLDecoder -import java.net.URLEncoder -import java.lang.{Integer => JInteger} +import java.lang.{Byte => JByte, Integer => JInteger} import java.math.{BigDecimal => JBigDecimal} -import java.lang.{Byte => JByte} -import java.util.UUID +import java.net.{URI, URLDecoder, URLEncoder} +import java.nio.charset.Charset import java.nio.charset.StandardCharsets.UTF_8 +import java.util.UUID -// Scala import scala.collection.JavaConversions._ import scala.util.Try import scala.util.control.NonFatal -// Apache HTTP -import org.apache.http.client.utils.URLEncodedUtils - -// Apache Commons Codec -import org.apache.commons.codec.binary.Base64 - -// Scalaz -import scalaz._ -import Scalaz._ - -// Scala URI import io.lemonlabs.uri.{Uri, Url} import io.lemonlabs.uri.config.UriConfig import io.lemonlabs.uri.decoding.PercentDecoder import io.lemonlabs.uri.encoding.percentEncode +import org.apache.commons.codec.binary.Base64 +import org.apache.commons.lang3.exception.ExceptionUtils +import org.apache.http.client.utils.URLEncodedUtils +import scalaz._ +import Scalaz._ /** * General-purpose utils to help the @@ -57,14 +47,14 @@ object ConversionUtils { * components of a URI. */ case class UriComponents( - // Required - scheme: String, - host: String, - port: JInteger, - // Optional - path: Option[String], - query: Option[String], - fragment: Option[String]) + // Required + scheme: String, + host: String, + port: JInteger, + // Optional + path: Option[String], + query: Option[String], + fragment: Option[String]) /** * Explodes a URI into its 6 components @@ -83,13 +73,13 @@ object ConversionUtils { // TODO: should we be using decodeString below instead? // Trouble is we can't be sure of the querystring's encoding. 
- val query = fixTabsNewlines(uri.getRawQuery) - val path = fixTabsNewlines(uri.getRawPath) + val query = fixTabsNewlines(uri.getRawQuery) + val path = fixTabsNewlines(uri.getRawPath) val fragment = fixTabsNewlines(uri.getRawFragment) UriComponents( scheme = uri.getScheme, - host = uri.getHost, + host = uri.getHost, port = if (port == -1 && uri.getScheme == "https") { 443 } else if (port == -1) { @@ -97,8 +87,8 @@ object ConversionUtils { } else { port }, - path = path, - query = query, + path = path, + query = query, fragment = fragment ) } @@ -157,7 +147,7 @@ object ConversionUtils { def decodeBase64Url(field: String, str: String): Validation[String, String] = try { val decodedBytes = UrlSafeBase64.decode(str) - val result = new String(decodedBytes, UTF_8) // Must specify charset (EMR uses US_ASCII) + val result = new String(decodedBytes, UTF_8) // Must specify charset (EMR uses US_ASCII) result.success } catch { case NonFatal(e) => @@ -192,10 +182,10 @@ object ConversionUtils { val validateUuid: (String, String) => ValidatedString = (field, str) => { def check(s: String)(u: UUID): Boolean = (u != null && s.toLowerCase == u.toString) - val uuid = Try(UUID.fromString(str)).toOption.filter(check(str)) + val uuid = Try(UUID.fromString(str)).toOption.filter(check(str)) uuid match { case Some(_) => str.toLowerCase.success - case None => s"Field [$field]: [$str] is not a valid UUID".fail + case None => s"Field [$field]: [$str] is not a valid UUID".fail } } @@ -238,18 +228,18 @@ object ConversionUtils { * @return a Scalaz Validation, wrapping either * an error String or the decoded String */ - val decodeString: (String, String, String) => ValidatedString = (enc, field, str) => + val decodeString: (Charset, String, String) => ValidatedString = (enc, field, str) => try { // TODO: switch to style of fixTabsNewlines above // TODO: potentially switch to using fixTabsNewlines too to avoid duplication val s = Option(str).getOrElse("") - val d = URLDecoder.decode(s, enc) + val d = URLDecoder.decode(s, enc.toString) val r = d.replaceAll("(\\r|\\n)", "").replaceAll("\\t", " ") r.success } catch { case NonFatal(e) => "Field [%s]: Exception URL-decoding [%s] (encoding [%s]): [%s]".format(field, str, enc, e.getMessage).fail - } + } /** * On 17th August 2013, Amazon made an @@ -298,7 +288,7 @@ object ConversionUtils { * an error String or the decoded String */ def doubleDecode(field: String, str: String): ValidatedString = - ConversionUtils.decodeString("UTF-8", field, singleEncodePcts(str)) + ConversionUtils.decodeString(UTF_8, field, singleEncodePcts(str)) /** * Encodes a string in the specified encoding @@ -348,7 +338,7 @@ object ConversionUtils { * @param uri URI containing the querystring * @param encoding Encoding of the URI */ - def extractQuerystring(uri: URI, encoding: String): Validation[String, Map[String, String]] = + def extractQuerystring(uri: URI, encoding: Charset): Validation[String, Map[String, String]] = Try(URLEncodedUtils.parse(uri, encoding).map(p => (p.getName -> p.getValue))).recoverWith { case NonFatal(_) => Try(Url.parse(uri.toString).query.params).map(l => l.map(t => (t._1, t._2.getOrElse("")))) @@ -486,7 +476,7 @@ object ConversionUtils { str match { case "1" => (1.toByte: JByte).success case "0" => (0.toByte: JByte).success - case _ => "Field [%s]: cannot convert [%s] to Boolean-like JByte".format(field, str).fail + case _ => "Field [%s]: cannot convert [%s] to Boolean-like JByte".format(field, str).fail } /** diff --git 
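The decodeString signature above now takes a java.nio.charset.Charset instead of a String encoding name, and doubleDecode and extractQuerystring follow suit, so call sites pass StandardCharsets constants directly. A minimal sketch of a call site under that assumption (the field name and input value are made up for illustration):

  import java.nio.charset.StandardCharsets.UTF_8

  // previously: ConversionUtils.decodeString("UTF-8", "querystring", "a%20b")
  // now the Charset object is passed and only stringified for URLDecoder and the error message
  val decoded = ConversionUtils.decodeString(UTF_8, "querystring", "a%20b")
  // expected: Success("a b")
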
a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/HttpClient.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/HttpClient.scala index 6d63e5cd1..fc1467c48 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/HttpClient.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/HttpClient.scala @@ -10,19 +10,14 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich -package common -package utils +package com.snowplowanalytics.snowplow.enrich.common.utils import scala.util.control.NonFatal -// Scalaz +import scalaj.http._ import scalaz._ import Scalaz._ -// Scalaj -import scalaj.http._ - object HttpClient { // The defaults are from scalaj library diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/JsonPath.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/JsonPath.scala index 18f44b95b..0623e497f 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/JsonPath.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/JsonPath.scala @@ -12,17 +12,12 @@ */ package com.snowplowanalytics.snowplow.enrich.common.utils -// Scalaz +import io.gatling.jsonpath.{JsonPath => GatlingJsonPath} import scalaz._ import Scalaz._ - -// Json4s import org.json4s._ import org.json4s.jackson.JsonMethods.mapper -// Gatling JsonPath -import io.gatling.jsonpath.{JsonPath => GatlingJsonPath} - /** * Wrapper for `io.gatling.jsonpath` for `json4s` and `scalaz` */ @@ -40,7 +35,7 @@ object JsonPath { def convertToJValue(json: JValue): Validation[String, Object] = json match { case JNothing => "JSONPath error: Nothing was given".failure - case other => json4sMapper.convertValue(other, classOf[Object]).success + case other => json4sMapper.convertValue(other, classOf[Object]).success } /** @@ -53,7 +48,7 @@ object JsonPath { def json4sQuery(json: JValue): List[JValue] = convertToJValue(json) match { case Success(pojo) => jsonPath.query(pojo).map(anyToJValue).toList - case Failure(_) => Nil + case Failure(_) => Nil } } @@ -66,7 +61,7 @@ object JsonPath { convertToJValue(json).flatMap { pojo => GatlingJsonPath.query(jsonPath, pojo) match { case Right(iterator) => iterator.map(anyToJValue).toList.success - case Left(error) => error.reason.fail + case Left(error) => error.reason.fail } } @@ -74,22 +69,22 @@ object JsonPath { * Precompile JsonPath query * * @param query JsonPath query as a string - * @return valid [[JsonPath]] object either error message + * @return valid JsonPath object either error message */ def compileQuery(query: String): Validation[String, GatlingJsonPath] = GatlingJsonPath.compile(query).leftMap(_.reason).disjunction.validation /** * Wrap list of values into JSON array if several values present - * Use in conjunction with `query`. [[JNothing]] will represent absent value + * Use in conjunction with `query`. 
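The JsonPath helpers above can be exercised as in the sketch below; it assumes the string-based query(jsonPath: String, json: JValue) entry point whose body appears in the hunk above, and uses a made-up JSON value:

  import org.json4s.jackson.JsonMethods.parse

  val ctx = parse("""{"items":[{"id":1},{"id":2}]}""")
  // query returns a Validation[String, List[JValue]]; wrapArray (just below) turns several matches into a JSON array
  JsonPath.query("$.items[*].id", ctx).map(JsonPath.wrapArray)
  // expected: Success(JArray(List(JInt(1), JInt(2))))
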
JNothing will represent absent value * * @param values list of JSON values * @return array if there's >1 values in list */ def wrapArray(values: List[JValue]): JValue = values match { - case Nil => JNothing + case Nil => JNothing case one :: Nil => one - case many => JArray(many) + case many => JArray(many) } /** diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/JsonUtils.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/JsonUtils.scala index 911015b68..9e5d87125 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/JsonUtils.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/JsonUtils.scala @@ -10,35 +10,20 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common -package utils +package com.snowplowanalytics.snowplow.enrich.common.utils -// Java import java.math.{BigInteger => JBigInteger} -import java.net.URLEncoder -// Scala import scala.util.control.NonFatal -// Jackson import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper} - -// Joda-Time import org.joda.time.{DateTime, DateTimeZone} import org.joda.time.format.{DateTimeFormat, DateTimeFormatter} - -// Scalaz import scalaz._ import Scalaz._ - -// json4s import org.json4s._ -import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -// This project -import utils.{ConversionUtils => CU} - /** * Contains general purpose extractors and other * utilities for JSONs. Jackson-based. @@ -63,7 +48,9 @@ object JsonUtils { * validates it as correct JSON. 
*/ val extractBase64EncJson: (String, String) => Validation[String, String] = (field, str) => - CU.decodeBase64Url(field, str).flatMap(json => validateAndReformatJson(field, json)) + ConversionUtils + .decodeBase64Url(field, str) + .flatMap(json => validateAndReformatJson(field, json)) /** * Converts a Joda DateTime into @@ -91,9 +78,9 @@ object JsonUtils { * original String */ private[utils] def booleanToJValue(str: String): JValue = str match { - case "true" => JBool(true) + case "true" => JBool(true) case "false" => JBool(false) - case _ => JString(str) + case _ => JString(str) } /** @@ -157,18 +144,19 @@ object JsonUtils { * @return a JField, containing the original key and the * processed String, now as a JValue */ - def toJField(key: String, - value: String, - bools: List[String], - ints: List[String], - dateTimes: DateTimeFields): JField = { + def toJField( + key: String, + value: String, + bools: List[String], + ints: List[String], + dateTimes: DateTimeFields): JField = { val v = (value, dateTimes) match { - case ("", _) => JNull - case _ if bools.contains(key) => booleanToJValue(value) - case _ if ints.contains(key) => integerToJValue(value) + case ("", _) => JNull + case _ if bools.contains(key) => booleanToJValue(value) + case _ if ints.contains(key) => integerToJValue(value) case (_, Some((nel, fmt))) if nel.toList.contains(key) => JString(toJsonSchemaDateTime(value, fmt)) - case _ => JString(value) + case _ => JString(value) } (key, v) } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/MapTransformer.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/MapTransformer.scala index 2d5e55a9b..a01b9e273 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/MapTransformer.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/MapTransformer.scala @@ -13,10 +13,8 @@ package com.snowplowanalytics.snowplow.enrich.common package utils -// Java import java.lang.reflect.Method -// Scalaz import scalaz._ import Scalaz._ @@ -62,7 +60,7 @@ import Scalaz._ object MapTransformer { // Clarificatory aliases - type Key = String + type Key = String type Value = String type Field = String @@ -92,7 +90,7 @@ object MapTransformer { */ def generate[T <: AnyRef](sourceMap: SourceMap, transformMap: TransformMap)(implicit m: Manifest[T]): Validated[T] = { val newInst = m.runtimeClass.newInstance() - val result = _transform(newInst, sourceMap, transformMap, getSetters(m.runtimeClass)) + val result = _transform(newInst, sourceMap, transformMap, getSetters(m.runtimeClass)) result.flatMap(s => newInst.asInstanceOf[T].success) // On success, replace the field count with the new instance } @@ -150,16 +148,17 @@ object MapTransformer { * of error Strings, or the count of * updated fields */ - private def _transform[T](obj: T, - sourceMap: SourceMap, - transformMap: TransformMap, - setters: SettersMap): ValidationNel[String, Int] = { + private def _transform[T]( + obj: T, + sourceMap: SourceMap, + transformMap: TransformMap, + setters: SettersMap): ValidationNel[String, Int] = { val results: List[Validation[String, Int]] = sourceMap.map { case (key, in) => if (transformMap.contains(key)) { val (func, field) = transformMap(key) - val out = func(key, in) + val out = func(key, in) out match { case Success(s) => diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/ScalazJson4sUtils.scala 
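The reformatted toJField in the JsonUtils hunk above picks a JSON type from the supplied key lists, falling back to JString; for example (the key name below is made up and appears in the bools list, so the raw value goes through booleanToJValue):

  JsonUtils.toJField("new_visitor", "true", bools = List("new_visitor"), ints = Nil, dateTimes = None)
  // expected: ("new_visitor", JBool(true)) – a json4s JField
  JsonUtils.toJField("new_visitor", "", bools = List("new_visitor"), ints = Nil, dateTimes = None)
  // expected: ("new_visitor", JNull) – the empty-string case wins before any list lookup
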
b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/ScalazJson4sUtils.scala index c761f019a..abf07b885 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/ScalazJson4sUtils.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/ScalazJson4sUtils.scala @@ -10,25 +10,15 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package utils -// Scalaz -import org.json4s.Formats - +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ +import org.json4s.{Formats, JNothing, JValue, MappingException} +import org.json4s.JsonDSL._ import scalaz._ import Scalaz._ -// json4s -import org.json4s.{DefaultFormats, JNothing, JValue, MappingException} -import org.json4s.JsonDSL._ - -// Iglu -import iglu.client.validation.ProcessingMessageMethods._ - object ScalazJson4sUtils { /** @@ -76,6 +66,6 @@ object ScalazJson4sUtils { def fieldExists(config: JValue, head: String, tail: String*): Boolean = (head +: tail).foldLeft(config)(_ \ _) match { case JNothing => false - case s => true + case s => true } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/shredder/Shredder.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/shredder/Shredder.scala index 24139cb96..4f1e04f88 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/shredder/Shredder.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/shredder/Shredder.scala @@ -10,33 +10,22 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
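fieldExists above only reports presence, folding the path segments over the config JSON; a made-up enrichment configuration illustrates both outcomes:

  import org.json4s.jackson.JsonMethods.parse

  val config = parse("""{"parameters":{"apiKey":"xxx"}}""")
  ScalazJson4sUtils.fieldExists(config, "parameters", "apiKey")    // true
  ScalazJson4sUtils.fieldExists(config, "parameters", "missing")   // false – the fold bottoms out in JNothing
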
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package utils package shredder -// Jackson -import com.fasterxml.jackson.databind.JsonNode -import com.fasterxml.jackson.databind.node.ObjectNode - -// Scala import scala.collection.JavaConversions._ -// Scalaz +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.node.ObjectNode +import com.snowplowanalytics.iglu.client.{JsonSchemaPair, Resolver, SchemaCriterion} +import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ +import com.snowplowanalytics.iglu.client.validation.ValidatableJsonMethods._ import scalaz._ import Scalaz._ -// Snowplow Common Enrich -import common._ import outputs.EnrichedEvent -// Iglu Scala Client -import iglu.client.{JsonSchemaPair, Resolver, SchemaCriterion} -import iglu.client.validation.ProcessingMessageMethods._ -import iglu.client.validation.ValidatableJsonMethods._ - /** * The shredder takes the two fields containing JSONs * (contexts and unstructured event properties) and @@ -89,7 +78,7 @@ object Shredder { // Get our unstructured event and Lists of contexts and derived_contexts val ue = extractAndValidateUnstructEvent(event) - val c = extractAndValidateCustomContexts(event) + val c = extractAndValidateCustomContexts(event) val dc = extractAndValidateDerivedContexts(event) // Joining all validated JSONs into a single validated List[JsonNode], collecting Failures too @@ -103,7 +92,7 @@ object Shredder { } /** - * Extract unstruct event out of [[EnrichedEvent]] and validate against it's schema + * Extract unstruct event out of EnrichedEvent and validate against it's schema * * @param event The Snowplow enriched event to find unstruct event in * @param resolver iglu resolver @@ -212,7 +201,7 @@ object Shredder { */ private[shredder] def flatten(o: Option[ValidatedNelMessage[JsonNodes]]): ValidatedNelMessage[JsonNodes] = o match { case Some(vjl) => vjl - case None => List[JsonNode]().success + case None => List[JsonNode]().success } /** @@ -227,11 +216,11 @@ object Shredder { */ private[shredder] def makePartialHierarchy(rootId: String, rootTstamp: String): TypeHierarchy = TypeHierarchy( - rootId = rootId, + rootId = rootId, rootTstamp = rootTstamp, - refRoot = TypeHierarchyRoot, - refTree = List(TypeHierarchyRoot), // This is a partial tree. Need to complete later - refParent = TypeHierarchyRoot // Hardcode as nested shredding not supported yet + refRoot = TypeHierarchyRoot, + refTree = List(TypeHierarchyRoot), // This is a partial tree. Need to complete later + refParent = TypeHierarchyRoot // Hardcode as nested shredding not supported yet ) /** @@ -273,7 +262,7 @@ object Shredder { // below structure. 
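The one non-formatting edit in the Shredder hunk just below is put("hierarchy", …) becoming set("hierarchy", …): newer Jackson versions deprecate ObjectNode.put(String, JsonNode) in favour of set/replace, and set is a drop-in replacement with the same effect here. A standalone sketch with made-up node contents:

  import com.fasterxml.jackson.databind.ObjectMapper

  val mapper = new ObjectMapper()
  val instance = mapper.createObjectNode()
  val hierarchyNode = mapper.createObjectNode().put("rootId", "abc") // put with a String value is not deprecated
  instance.set("hierarchy", hierarchyNode)                           // non-deprecated equivalent of put(String, JsonNode)
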
val updated = instance.asInstanceOf[ObjectNode] updated.replace("schema", schemaNode) - updated.put("hierarchy", hierarchyNode) + updated.set("hierarchy", hierarchyNode) (schemaKey, updated) } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/shredder/TypeHierarchy.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/shredder/TypeHierarchy.scala index e676a792b..f57c14b9b 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/shredder/TypeHierarchy.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/shredder/TypeHierarchy.scala @@ -10,23 +10,11 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich -package common -package utils -package shredder +package com.snowplowanalytics.snowplow.enrich.common.utils.shredder -// Jackson -import com.github.fge.jackson.JacksonUtils import com.fasterxml.jackson.databind.JsonNode - -// Scala -import scala.collection.JavaConversions._ - -// Scalaz +import com.github.fge.jackson.JacksonUtils import scalaz._ -import Scalaz._ - -// json4s import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ @@ -66,11 +54,11 @@ case class TypeHierarchy( * @return the TypeHierarchy as a json4s JValue */ def toJValue: JValue = - ("rootId" -> rootId) ~ + ("rootId" -> rootId) ~ ("rootTstamp" -> rootTstamp) ~ - ("refRoot" -> refRoot) ~ - ("refTree" -> refTree) ~ - ("refParent" -> refParent) + ("refRoot" -> refRoot) ~ + ("refTree" -> refTree) ~ + ("refParent" -> refParent) /** * Completes a partial TypeHierarchy with @@ -93,7 +81,7 @@ case class TypeHierarchy( Lens.lensu((ph, rt) => { val full = ph.refTree ++ rt ph.copy( - refTree = full, + refTree = full, refParent = secondTail(full) ) }, _.refTree) @@ -108,8 +96,8 @@ case class TypeHierarchy( */ private def secondTail[A](ls: List[A]): A = ls match { case h :: _ :: Nil => h - case _ :: tail => secondTail(tail) - case _ => throw new NoSuchElementException + case _ :: tail => secondTail(tail) + case _ => throw new NoSuchElementException } } diff --git a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/package.scala b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/package.scala index 40ca685ae..352becdec 100644 --- a/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/package.scala +++ b/modules/common/src/main/scala/com.snowplowanalytics.snowplow.enrich/package.scala @@ -12,23 +12,13 @@ */ package com.snowplowanalytics.snowplow.enrich -// Java import java.lang.{Integer => JInteger} -// Scalaz -import scalaz._ -import Scalaz._ - -// Apache URLEncodedUtils -import org.apache.http.NameValuePair - -// JSON Schema import com.github.fge.jsonschema.core.report.ProcessingMessage - -// Iglu import com.snowplowanalytics.iglu.client.JsonSchemaPair +import org.apache.http.NameValuePair +import scalaz._ -// This project import common.loaders.CollectorPayload import common.adapters.RawEvent import common.outputs.EnrichedEvent @@ -151,8 +141,8 @@ package object common { type JsonSchemaPairs = List[JsonSchemaPair] /** - * Type alias for either [[Throwable]] or successful value - * It has [[Monad]] instance unlike [[Validation]] + * Type alias for either Throwable or successful value + * It has Monad 
instance unlike Validation */ type ThrowableXor[+A] = Throwable \/ A } diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/SpecHelpers.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/SpecHelpers.scala index 59ce089d8..4dd266647 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/SpecHelpers.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/SpecHelpers.scala @@ -10,25 +10,14 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common -// Apache URLEncodedUtils +import com.snowplowanalytics.iglu.client.Resolver import org.apache.http.NameValuePair import org.apache.http.message.BasicNameValuePair -// Iglu Scala Client -import iglu.client.Resolver - -// This project import utils.JsonUtils -// Scalaz -import scalaz._ -import Scalaz._ - object SpecHelpers { // Internal diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/AdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/AdapterSpec.scala index df9c9a526..e96a44023 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/AdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/AdapterSpec.scala @@ -10,37 +10,24 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
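The ThrowableXor alias above earns its place because scalaz's \/ has a Monad and therefore flatMap, whereas Validation is geared towards accumulating errors; a made-up example of the chaining this enables (parsePort is a hypothetical helper, not part of the codebase):

  import com.snowplowanalytics.snowplow.enrich.common._
  import scalaz._

  def parsePort(s: String): ThrowableXor[Int] =
    try \/-(s.toInt) catch { case e: NumberFormatException => -\/(e) }

  // \/ short-circuits on the first Throwable, so steps compose with flatMap
  val port: ThrowableXor[Int] =
    parsePort("8080").flatMap(p => if (p > 0) \/-(p) else -\/(new IllegalArgumentException("non-positive port")))
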
*/ -package com.snowplowanalytics -package snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Iglu -import iglu.client.Resolver - -// Joda-Time +import com.snowplowanalytics.iglu.client.Resolver import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -// Snowplow import SpecHelpers._ import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - class AdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the Adapter trait's functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/CallrailAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/CallrailAdapterSpec.scala index cb1132fcf..f9de8fce6 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/CallrailAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/CallrailAdapterSpec.scala @@ -14,22 +14,16 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} import SpecHelpers._ -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - class CallrailAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the CallrailAdapter functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/CloudfrontAccessLogAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/CloudfrontAccessLogAdapterSpec.scala index f13480878..ed7721b88 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/CloudfrontAccessLogAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/CloudfrontAccessLogAdapterSpec.scala @@ -14,23 +14,16 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource, TsvLoader} -import utils.ConversionUtils import SpecHelpers._ -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - class 
CloudfrontAccessLogAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the CloudfrontAccessLogAdapter functionality @@ -395,7 +388,7 @@ class CloudfrontAccessLogAdapterSpec extends Specification with DataTables with CollectorPayload(Shared.api, params, None, - "a\tb\tc\td\te\tf\tg\th\ti\t$url\tk\t$doubleEncodedQs".some, + s"a\tb\tc\td\te\tf\tg\th\ti\t$url\tk\t$doubleEncodedQs".some, Shared.source, Shared.context) val actual = CloudfrontAccessLogAdapter.WebDistribution.toRawEvents(payload) diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/GoogleAnalyticsAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/GoogleAnalyticsAdapterSpec.scala index e439010ce..b6f41331f 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/GoogleAnalyticsAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/GoogleAnalyticsAdapterSpec.scala @@ -15,19 +15,13 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz -import scalaz._ -import Scalaz._ - -// Specs2 import org.specs2.Specification import org.specs2.matcher.DataTables import org.specs2.scalaz.{DisjunctionMatchers, ValidationMatchers} +import scalaz._ +import Scalaz._ -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} import GoogleAnalyticsAdapter._ diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/HubSpotAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/HubSpotAdapterSpec.scala index c827721c3..d2809d4d3 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/HubSpotAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/HubSpotAdapterSpec.scala @@ -14,28 +14,15 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ - -// json4s import org.json4s._ -import org.json4s.JsonDSL._ -import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} -import utils.ConversionUtils -import SpecHelpers._ - -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers class HubSpotAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/IgluAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/IgluAdapterSpec.scala index c9333f285..4eac02c35 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/IgluAdapterSpec.scala +++ 
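Besides the import clean-up, this spec gains an s prefix on the raw CloudFront TSV fixture, so $url and $doubleEncodedQs are now interpolated into the string instead of appearing literally. As a reminder of the difference (url here is an arbitrary illustrative value):

  val url = "http://snowplowanalytics.com"
  "a\tb\t$url"    // a literal dollar sign followed by "url" – no interpolation without the prefix
  s"a\tb\t$url"   // a<TAB>b<TAB>http://snowplowanalytics.com
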
b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/IgluAdapterSpec.scala @@ -14,26 +14,16 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ -// json4s -import org.json4s._ - -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} -import utils.ConversionUtils import SpecHelpers._ -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - class IgluAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the IgluAdapter functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MailchimpAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MailchimpAdapterSpec.scala index 5090d2b49..067b83dd8 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MailchimpAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MailchimpAdapterSpec.scala @@ -14,33 +14,17 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{Specification, ScalaCheck} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ - -// json4s import org.json4s._ import org.json4s.JsonDSL._ -import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ - -// Snowplow -import loaders.{ - CollectorApi, - CollectorSource, - CollectorContext, - CollectorPayload -} -import utils.ConversionUtils -import SpecHelpers._ -// Specs2 -import org.specs2.{Specification, ScalaCheck} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers +import loaders._ +import SpecHelpers._ class MailchimpAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the MailchimpAdapter functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MailgunAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MailgunAdapterSpec.scala index c23a7d6b6..ee5586fdc 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MailgunAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MailgunAdapterSpec.scala @@ -14,30 +14,15 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ -// json4s -import org.json4s._ -import org.json4s.JsonDSL._ -import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ - -// Snowplow import loaders.{CollectorApi, CollectorContext, 
CollectorPayload, CollectorSource} -import utils.ConversionUtils -import SpecHelpers._ - -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - class MailgunAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the MailgunAdapter functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MandrillAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MandrillAdapterSpec.scala index e40f6f39b..7316fc41a 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MandrillAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MandrillAdapterSpec.scala @@ -14,24 +14,16 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ - -// json4s import org.json4s._ -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - class MandrillAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the MandrillAdapter functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MarketoAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MarketoAdapterSpec.scala index 5faa0ab3c..1fda7fa3f 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MarketoAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/MarketoAdapterSpec.scala @@ -14,28 +14,14 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ -// json4s -import org.json4s._ -import org.json4s.JsonDSL._ -import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ - -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} -import utils.ConversionUtils -import SpecHelpers._ - -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers class MarketoAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/OlarkAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/OlarkAdapterSpec.scala index e867e8c36..1b7471ebd 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/OlarkAdapterSpec.scala 
+++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/OlarkAdapterSpec.scala @@ -14,30 +14,15 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ -// json4s -import org.json4s._ -import org.json4s.JsonDSL._ -import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ - -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} -import utils.ConversionUtils -import SpecHelpers._ - -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - class OlarkAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the OlarkAdapter functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/PagerdutyAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/PagerdutyAdapterSpec.scala index 3bd9027d4..2e906f2bb 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/PagerdutyAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/PagerdutyAdapterSpec.scala @@ -14,33 +14,16 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{Specification, ScalaCheck} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ - -// json4s import org.json4s._ -import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ - -// Snowplow -import loaders.{ - CollectorApi, - CollectorSource, - CollectorContext, - CollectorPayload -} -import utils.ConversionUtils -import SpecHelpers._ -// Specs2 -import org.specs2.{Specification, ScalaCheck} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers +import loaders._ class PagerdutyAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the PagerdutyAdapter functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/PingdomAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/PingdomAdapterSpec.scala index d213020a6..9d1a93ccb 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/PingdomAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/PingdomAdapterSpec.scala @@ -14,29 +14,18 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ - -// json4s import org.json4s._ -import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -import 
org.json4s.scalaz.JsonScalaz._ -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} -import utils.ConversionUtils import SpecHelpers._ -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - class PingdomAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the PingdomAdapter functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/SendgridAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/SendgridAdapterSpec.scala index 9516c3166..ff4049b14 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/SendgridAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/SendgridAdapterSpec.scala @@ -14,29 +14,16 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz -import scalaz.Scalaz._ -import scalaz._ - -// Snowplow -import com.snowplowanalytics.snowplow.enrich.common.loaders.{ - CollectorApi, - CollectorContext, - CollectorPayload, - CollectorSource -} - -// Specs2 import org.specs2.mutable.Specification import org.specs2.scalaz.ValidationMatchers - -// json4s +import scalaz._ +import Scalaz._ import org.json4s._ import org.json4s.jackson.JsonMethods._ +import loaders._ + class SendgridAdapterSpec extends Specification with ValidationMatchers { implicit val resolver = SpecHelpers.IgluResolver diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/StatusGatorAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/StatusGatorAdapterSpec.scala index 4f41e30e7..2b83364d0 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/StatusGatorAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/StatusGatorAdapterSpec.scala @@ -14,30 +14,15 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ -// json4s -import org.json4s._ -import org.json4s.JsonDSL._ -import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ - -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} -import utils.ConversionUtils -import SpecHelpers._ - -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - class StatusGatorAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the StatusgatorAdapter functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/UnbounceAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/UnbounceAdapterSpec.scala index 2ecdfb846..10d4249c2 100644 --- 
a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/UnbounceAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/UnbounceAdapterSpec.scala @@ -14,30 +14,16 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ -// json4s -import org.json4s._ -import org.json4s.JsonDSL._ -import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ - -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} - -import utils.ConversionUtils import SpecHelpers._ -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - class UnbounceAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the UnbounceAdapter functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/UrbanAirshipAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/UrbanAirshipAdapterSpec.scala index b3077efe9..912dd0bda 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/UrbanAirshipAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/UrbanAirshipAdapterSpec.scala @@ -10,29 +10,19 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow.enrich.common.adapters.registry - -import com.snowplowanalytics.snowplow.enrich.common.SpecHelpers -import com.snowplowanalytics.snowplow.enrich.common.loaders.{ - CollectorApi, - CollectorContext, - CollectorPayload, - CollectorSource -} -import org.joda.time.DateTime +package com.snowplowanalytics.snowplow.enrich.common +package adapters.registry -// Scalaz +import org.joda.time.DateTime import scalaz.Scalaz._ import scalaz._ - -// Specs2 import org.specs2.mutable.Specification import org.specs2.scalaz.ValidationMatchers - -// json4s import org.json4s._ import org.json4s.jackson.JsonMethods._ +import loaders._ + class UrbanAirshipAdapterSpec extends Specification with ValidationMatchers { implicit val resolver = SpecHelpers.IgluResolver diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/VeroAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/VeroAdapterSpec.scala index 2206a73c6..ff5feda6f 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/VeroAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/VeroAdapterSpec.scala @@ -14,28 +14,14 @@ package com.snowplowanalytics.snowplow.enrich.common package adapters package registry -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ -// json4s -import org.json4s._ -import org.json4s.JsonDSL._ -import org.json4s.jackson.JsonMethods._ -import org.json4s.scalaz.JsonScalaz._ - -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} -import utils.ConversionUtils -import SpecHelpers._ - -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers class VeroAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/SnowplowAdapterSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/snowplow/SnowplowAdapterSpec.scala similarity index 99% rename from modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/SnowplowAdapterSpec.scala rename to modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/snowplow/SnowplowAdapterSpec.scala index f4290913c..7b11c2a5a 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/SnowplowAdapterSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/adapters/registry/snowplow/SnowplowAdapterSpec.scala @@ -15,23 +15,17 @@ package adapters package registry package snowplow -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ -// Snowplow import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} import utils.{ConversionUtils => CU} import SpecHelpers._ -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import 
org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - class SnowplowAdapterSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the SnowplowAdapter functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/EnrichmentRegistrySpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/EnrichmentRegistrySpec.scala index 4b55adc2e..1d4840bf3 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/EnrichmentRegistrySpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/EnrichmentRegistrySpec.scala @@ -10,20 +10,20 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package enrichments import java.net.URI -import com.snowplowanalytics.snowplow.enrich.common.enrichments.registry.Enrichment import org.specs2.Specification import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers +import registry.Enrichment + case class NoFileEnrichment() extends Enrichment case class FileEnrichment(files: List[(URI, String)]) extends Enrichment { - override def filesToCache(): List[(URI, String)] = files + override def filesToCache: List[(URI, String)] = files } class EnrichmentRegistrySpec extends Specification with DataTables with ValidationMatchers { diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/SchemaEnrichmentTest.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/SchemaEnrichmentTest.scala index 5fe1e885d..a61d2cda3 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/SchemaEnrichmentTest.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/SchemaEnrichmentTest.scala @@ -10,25 +10,15 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
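In EnrichmentRegistrySpec above, the filesToCache override drops its empty parameter list, presumably to match a parameterless declaration on the Enrichment trait (the trait itself is not part of this diff). The shape that implies, with the trait member reconstructed hypothetically:

  import java.net.URI

  trait Enrichment {
    def filesToCache: List[(URI, String)] = Nil   // hypothetical default – NoFileEnrichment relies on one existing
  }

  case class FileEnrichment(files: List[(URI, String)]) extends Enrichment {
    override def filesToCache: List[(URI, String)] = files   // same arity as the trait member
  }
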
*/ -package com.snowplowanalytics.snowplow.enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package enrichments -// Iglu import com.snowplowanalytics.iglu.client.SchemaKey - -// Common -import outputs.EnrichedEvent -import enrichments.SchemaEnrichment._ - -// Specs2 import org.specs2.Specification import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers -// Scalaz -import scalaz._ -import Scalaz._ +import outputs.EnrichedEvent class SchemaEnrichmentTest extends Specification with DataTables with ValidationMatchers { diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/clientEnrichmentSpecs.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/clientEnrichmentSpecs.scala index 754791d46..8c0d705f8 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/clientEnrichmentSpecs.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/clientEnrichmentSpecs.scala @@ -13,18 +13,11 @@ package com.snowplowanalytics.snowplow.enrich.common package enrichments -// Specs2 import org.specs2.Specification import org.specs2.matcher.DataTables -import org.specs2.scalaz._ - -// Scalaz import scalaz._ import Scalaz._ -/** - * Tests the extractViewDimensions function - */ class ExtractViewDimensionsSpec extends Specification with DataTables { val FieldName = "res" diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/eventEnrichmentSpecs.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/eventEnrichmentSpecs.scala index 465d65f16..da46a5ea8 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/eventEnrichmentSpecs.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/eventEnrichmentSpecs.scala @@ -10,19 +10,13 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common -package enrichments +package com.snowplowanalytics.snowplow.enrich.common.enrichments -// Specs2 +import org.joda.time.DateTime +import org.joda.time.DateTimeZone import org.specs2.Specification import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers - -// Joda -import org.joda.time.DateTime -import org.joda.time.DateTimeZone - -// Scalaz import scalaz._ import Scalaz._ @@ -76,10 +70,10 @@ class ExtractEventTypeSpec extends Specification with DataTables with Validation } def e4 = - "SPEC NAME" || "INPUT VAL" | "EXPECTED OUTPUT" | - "Not long" !! ("f", "v") ! "Field [f]: [v] is not in the expected format (ms since epoch)".fail | - "Too long" !! ("f", "1111111111111111") ! "Field [f]: [1111111111111111] is formatted as [37179-09-17 07:18:31.111] which isn't Redshift-compatible".fail | - "Valid ts" !! ("f", "1") ! "1970-01-01 00:00:00.001".success |> { (_, input, expected) => + "SPEC NAME" || "INPUT VAL" | "EXPECTED OUTPUT" | + "Not long" !! (("f", "v")) ! "Field [f]: [v] is not in the expected format (ms since epoch)".fail | + "Too long" !! (("f", "1111111111111111")) ! "Field [f]: [1111111111111111] is formatted as [37179-09-17 07:18:31.111] which isn't Redshift-compatible".fail | + "Valid ts" !! (("f", "1")) ! 
"1970-01-01 00:00:00.001".success |> { (_, input, expected) => EventEnrichments.extractTimestamp(input._1, input._2) must_== (expected) } } diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/miscEnrichmentSpecs.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/miscEnrichmentSpecs.scala index 12f9e0325..d72bd6722 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/miscEnrichmentSpecs.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/miscEnrichmentSpecs.scala @@ -10,28 +10,15 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common -package enrichments +package com.snowplowanalytics.snowplow.enrich.common.enrichments -// Specs2 & ScalaCheck import org.specs2.mutable.{Specification => MutSpecification} import org.specs2.{ScalaCheck, Specification} import org.specs2.matcher.DataTables -import org.scalacheck._ - -// Scalaz import scalaz._ import Scalaz._ - -// json4s -import org.json4s._ import org.json4s.JsonDSL._ -import org.json4s.jackson.JsonMethods._ -/** - * Tests the etlVersion variable. - * Uses mutable.Specification. - */ class EtlVersionSpec extends MutSpecification { "The ETL version" should { diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/AnonIpEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/AnonIpEnrichmentSpec.scala index 02b997da3..659c97f89 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/AnonIpEnrichmentSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/AnonIpEnrichmentSpec.scala @@ -10,9 +10,7 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry import org.specs2.{ScalaCheck, Specification} import org.specs2.matcher.DataTables diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/CampaignAttributionEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/CampaignAttributionEnrichmentSpec.scala index 3006ee88e..6246c6717 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/CampaignAttributionEnrichmentSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/CampaignAttributionEnrichmentSpec.scala @@ -10,21 +10,11 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow.enrich.common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry -// Specs2 import org.specs2.Specification import org.specs2.scalaz.ValidationMatchers -// Scalaz -import scalaz._ -import Scalaz._ - -/** - * Tests CampaignAttributionEnrichment - */ class CampaignAttributionEnrichmentSpec extends Specification with ValidationMatchers { def is = s2""" This is a specification to test the CampaignAttributionEnrichment diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/CookieExtractorEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/CookieExtractorEnrichmentSpec.scala index 670b654eb..a9df50f84 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/CookieExtractorEnrichmentSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/CookieExtractorEnrichmentSpec.scala @@ -9,22 +9,11 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry -// Specs2 import org.specs2.Specification import org.specs2.scalaz._ - -// Scalaz -import scalaz._ -import Scalaz._ - -// Json4s import org.json4s._ -import org.json4s.JValue -import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ class CookieExtractorEnrichmentSpec extends Specification with ValidationMatchers { diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/CurrencyConversionEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/CurrencyConversionEnrichmentSpec.scala index 557f0c5db..7014f21fb 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/CurrencyConversionEnrichmentSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/CurrencyConversionEnrichmentSpec.scala @@ -10,25 +10,15 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow.enrich.common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry -// Scala-Forex import com.snowplowanalytics.forex.oerclient.DeveloperAccount - -// Specs2 +import org.joda.time.DateTime import org.specs2.Specification import org.specs2.matcher.DataTables -import org.specs2.scalaz._ - -// Scalaz import scalaz._ import Scalaz._ -// Joda Time -import org.joda.time.DateTime - object CurrencyConversionEnrichmentSpec { val OerApiKey = "OER_KEY" } diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/EnrichmentConfigsSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/EnrichmentConfigsSpec.scala index 13a9249ef..83c349bc8 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/EnrichmentConfigsSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/EnrichmentConfigsSpec.scala @@ -10,44 +10,18 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common -package enrichments -package registry - -// Java -import java.net.URI -import java.lang.{Byte => JByte} +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry -import com.snowplowanalytics.iglu.client.SchemaCriterion -import org.specs2.matcher.DataTables +import java.net.URI -// Apache Commons Codec +import com.snowplowanalytics.forex.oerclient.DeveloperAccount +import com.snowplowanalytics.iglu.client.SchemaKey import org.apache.commons.codec.binary.Base64 - -// Scalaz -import scalaz._ -import Scalaz._ - -// json4s import org.json4s.jackson.JsonMethods.parse - -// Iglu -import com.snowplowanalytics.iglu.client.SchemaKey -import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._ - -// Scala-Forex -import com.snowplowanalytics.forex.oerclient.DeveloperAccount - -// Specs2 +import org.specs2.matcher.DataTables import org.specs2.mutable.Specification import org.specs2.scalaz.ValidationMatchers -/** - * Tests enrichmentConfigs - */ class EnrichmentConfigsSpec extends Specification with ValidationMatchers with DataTables { "Parsing a valid anon_ip enrichment JSON" should { @@ -107,12 +81,12 @@ class EnrichmentConfigsSpec extends Specification with ValidationMatchers with D val schemaKey = SchemaKey("com.snowplowanalytics.snowplow", "ip_lookups", "jsonschema", "2-0-0") val expected = IpLookupsEnrichment( - Some("geo", + Some(("geo", new URI("http://snowplow-hosted-assets.s3.amazonaws.com/third-party/maxmind/GeoIP2-City.mmdb"), - "GeoIP2-City.mmdb"), - Some("isp", + "GeoIP2-City.mmdb")), + Some(("isp", new URI("http://snowplow-hosted-assets.s3.amazonaws.com/third-party/maxmind/GeoIP2-ISP.mmdb"), - "GeoIP2-ISP.mmdb"), + "GeoIP2-ISP.mmdb")), None, None, true diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/EventFingerprintEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/EventFingerprintEnrichmentSpec.scala index 777dc8120..0499e018c 100644 --- 
a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/EventFingerprintEnrichmentSpec.scala
+++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/EventFingerprintEnrichmentSpec.scala
@@ -10,21 +10,11 @@
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
-package com.snowplowanalytics.snowplow.enrich.common
-package enrichments
-package registry
+package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry
-// Specs2
 import org.specs2.Specification
 import org.specs2.scalaz.ValidationMatchers
-// Scalaz
-import scalaz._
-import Scalaz._
-
-/**
- * Tests EventFingerprintEnrichment
- */
 class EventFingerprintEnrichmentSpec extends Specification with ValidationMatchers {
 def is = s2"""
 This is a specification to test the EventFingerprintEnrichment
diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/HttpHeaderExtractorEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/HttpHeaderExtractorEnrichmentSpec.scala
index 5f5f840db..5fd78c97c 100644
--- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/HttpHeaderExtractorEnrichmentSpec.scala
+++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/HttpHeaderExtractorEnrichmentSpec.scala
@@ -9,23 +9,12 @@
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
-package com.snowplowanalytics.snowplow.enrich.common
-package enrichments
-package registry
+package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry
-// Specs2
-import org.specs2.Specification
-import org.specs2.scalaz._
-
-// Scalaz
-import scalaz._
-import Scalaz._
-
-// Json4s
 import org.json4s._
-import org.json4s.JValue
-import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
+import org.specs2.Specification
+import org.specs2.scalaz._
 class HttpHeaderExtractorEnrichmentSpec extends Specification with ValidationMatchers {
 def is = s2"""
@@ -55,9 +44,10 @@ class HttpHeaderExtractorEnrichmentSpec extends Specification with ValidationMat
 }
 def e3 = {
- val expected = List()
+ val expected = List.empty[String]
- HttpHeaderExtractorEnrichment(".*").extract(Nil).map(h => compact(render(h))) must_== expected.map(e =>
- compact(render(parse(e))))
+ HttpHeaderExtractorEnrichment(".*")
+ .extract(Nil)
+ .map(h => compact(render(h))) must_== expected.map(e => compact(render(parse(e))))
 }
 }
diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/IabEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/IabEnrichmentSpec.scala
index 219bb3a15..35ffc26e5 100644
--- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/IabEnrichmentSpec.scala
+++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/IabEnrichmentSpec.scala
@@ -10,29 +10,15 @@
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow.enrich.common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry -// Java import java.net.URI -// joda-time import org.joda.time.DateTime - -// json4s import org.json4s.jackson.JsonMethods.parse - -// iglu -import iglu.client.SchemaKey - -// Specs2, Scalaz-Specs2 & ScalaCheck +import org.specs2.{ScalaCheck, Specification} import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers -import org.specs2.{ScalaCheck, Specification} - -// Scalaz import scalaz._ class IabEnrichmentSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/IpLookupsEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/IpLookupsEnrichmentSpec.scala index 8d94a7a20..91598c513 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/IpLookupsEnrichmentSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/IpLookupsEnrichmentSpec.scala @@ -10,29 +10,17 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow.enrich.common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry -// Java import java.net.URI -// Specs2, Scalaz-Specs2 & ScalaCheck +import com.snowplowanalytics.maxmind.iplookups.model.IpLocation import org.specs2.{ScalaCheck, Specification} import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers -import org.scalacheck._ -import org.scalacheck.Arbitrary._ - -// Scalaz import scalaz._ import Scalaz._ -// Scala MaxMind GeoIP -import maxmind.iplookups.IpLookups -import maxmind.iplookups.model.IpLocation - class IpLookupsEnrichmentSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" This is a specification to test the IpLookupsEnrichment diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/JavascriptScriptEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/JavascriptScriptEnrichmentSpec.scala index fa03936e6..ea5951e7c 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/JavascriptScriptEnrichmentSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/JavascriptScriptEnrichmentSpec.scala @@ -11,26 +11,15 @@ * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ package com.snowplowanalytics.snowplow.enrich.common -package enrichments -package registry +package enrichments.registry -// Scalaz -import scalaz._ -import Scalaz._ - -// Json4s +import org.specs2.Specification +import org.specs2.scalaz.ValidationMatchers import org.json4s._ -import org.json4s.JValue -import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ -// This project import outputs.EnrichedEvent -// Specs2 -import org.specs2.Specification -import org.specs2.scalaz.ValidationMatchers - /** * Tests the anonymzeIp function */ diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/RefererParserEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/RefererParserEnrichmentSpec.scala index 4632a92d9..cff86371c 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/RefererParserEnrichmentSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/RefererParserEnrichmentSpec.scala @@ -10,24 +10,13 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common -package enrichments -package registry +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry -// Java import java.net.URI -// Specs2 & Scalaz-Specs2 +import com.snowplowanalytics.refererparser.scala.{Medium, Referer} import org.specs2.Specification import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - -// Scalaz -import scalaz._ -import Scalaz._ - -// referer-parser -import com.snowplowanalytics.refererparser.scala.{Medium, Referer} /** * A small selection of tests partially borrowed from referer-parser. diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/UaParserEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/UaParserEnrichmentSpec.scala index b6d44c706..e3a6d7e09 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/UaParserEnrichmentSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/UaParserEnrichmentSpec.scala @@ -9,21 +9,14 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
 */
-package com.snowplowanalytics.snowplow.enrich.common
-package enrichments
-package registry
+package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry
-// Specs2
 import java.net.URI
-import org.specs2.matcher.DataTables
-import org.specs2.scalaz._
-
-// Scalaz
-
-// Json4s
 import org.json4s._
 import org.json4s.jackson.JsonMethods._
+import org.specs2.matcher.DataTables
+import org.specs2.scalaz._
 class UaParserEnrichmentSpec extends org.specs2.mutable.Specification with ValidationMatchers with DataTables {
diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/UserAgentUtilsEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/UserAgentUtilsEnrichmentSpec.scala
index fad4aaf07..b7c76a0a7 100644
--- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/UserAgentUtilsEnrichmentSpec.scala
+++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/UserAgentUtilsEnrichmentSpec.scala
@@ -9,29 +9,18 @@
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
-package com.snowplowanalytics.snowplow.enrich.common
-package enrichments
-package registry
+package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry
-// Specs2
-import org.specs2.Specification
 import org.specs2.matcher.DataTables
 import org.specs2.scalaz._
-// Scalaz
-import scalaz._
-import Scalaz._
-
 class UserAgentUtilsEnrichmentSpec extends org.specs2.mutable.Specification with ValidationMatchers with DataTables {
- import UserAgentUtilsEnrichment._
 "useragent parser" should {
 "parse useragent" in {
- "SPEC NAME" || "Input UserAgent" | "Browser name" | "Browser family" | "Browser version" | "Browser type" | "Browser rendering enging" | "OS fields" | "Device type" | "Device is mobile" |
- "Safari spec" !! "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.152 Safari/537.36" ! "Chrome 33" ! "Chrome" ! Some(
- "33.0.1750.152") ! "Browser" ! "WEBKIT" ! ("Mac OS X", "Mac OS X", "Apple Inc.") ! "Computer" ! false |
- "IE spec" !! "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0" ! "Internet Explorer 11" ! "Internet Explorer" ! Some(
- "11.0") ! "Browser" ! "TRIDENT" ! ("Windows 7", "Windows", "Microsoft Corporation") ! "Computer" ! false |> {
+ "SPEC NAME" || "Input UserAgent" | "Browser name" | "Browser family" | "Browser version" | "Browser type" | "Browser rendering enging" | "OS fields" | "Device type" | "Device is mobile" |>
+ "Safari spec" !! "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.152 Safari/537.36" ! "Chrome 33" ! "Chrome" ! Some("33.0.1750.152") ! "Browser" ! "WEBKIT" ! (("Mac OS X", "Mac OS X", "Apple Inc.")) ! "Computer" ! false |
+ "IE spec" !! "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0" ! "Internet Explorer 11" ! "Internet Explorer" ! Some("11.0") ! "Browser" ! "TRIDENT" ! (("Windows 7", "Windows", "Microsoft Corporation")) ! "Computer" ! false | {
 (_, input,
diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/WeatherEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/WeatherEnrichmentSpec.scala
index 6c8e25058..e4fb66f7f 100644
--- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/WeatherEnrichmentSpec.scala
+++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/WeatherEnrichmentSpec.scala
@@ -9,35 +9,22 @@
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
-package com.snowplowanalytics.snowplow.enrich.common
-package enrichments
-package registry
+package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry
-// Java
 import java.lang.{Float => JFloat}
-// Specs2
-import org.specs2.Specification
-
-// Joda
+import com.snowplowanalytics.iglu.client.SchemaKey
 import org.joda.time.DateTime
-
-// Json4s
 import org.json4s._
 import org.json4s.jackson.JsonMethods.parse
-
-// Snowplow
-import com.snowplowanalytics.iglu.client.SchemaKey
-
-// Scala weather
-import com.snowplowanalytics.weather._
+import org.specs2.Specification
 object WeatherEnrichmentSpec {
 val OwmApiKey = "OWM_KEY"
 }
-import WeatherEnrichmentSpec._
 class WeatherEnrichmentSpec extends Specification {
+ import WeatherEnrichmentSpec._
 def is = skipAllIf(sys.env.get(OwmApiKey).isEmpty) ^ // Actually only e4 and e6 need to be skipped
 s2"""
diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/ApiRequestEnrichmentIntegrationTest.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/ApiRequestEnrichmentIntegrationTest.scala
index eb5818686..8bb78f1c5 100644
--- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/ApiRequestEnrichmentIntegrationTest.scala
+++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/ApiRequestEnrichmentIntegrationTest.scala
@@ -11,25 +11,17 @@
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/ package com.snowplowanalytics.snowplow.enrich.common -package enrichments -package registry -package apirequest +package enrichments.registry.apirequest -// json4s +import com.snowplowanalytics.iglu.client.{JsonSchemaPair, SchemaKey} import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.{parseJson, prettyJson} import org.json4s.jackson.JsonMethods.asJsonNode - -// specs2 import org.specs2.Specification import org.specs2.scalaz.ValidationMatchers import org.specs2.matcher.Matcher -// Iglu -import com.snowplowanalytics.iglu.client.{JsonSchemaPair, SchemaKey} - -// This project import outputs.EnrichedEvent object ApiRequestEnrichmentIntegrationTest { @@ -47,7 +39,9 @@ object ApiRequestEnrichmentIntegrationTest { def createPair(key: SchemaKey, validJson: String): JsonSchemaPair = { val hierarchy = parseJson( s"""{"rootId":null,"rootTstamp":null,"refRoot":"events","refTree":["events","${key.name}"],"refParent":"events"}""") - (key, asJsonNode(("data", parseJson(validJson)) ~ ("hierarchy", hierarchy) ~ ("schema", key.toJValue))) + (key, asJsonNode(("data", parseJson(validJson)) ~ + (("hierarchy", hierarchy)) ~ + (("schema", key.toJValue)))) } } diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/ApiRequestEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/ApiRequestEnrichmentSpec.scala index 650ac82e7..a1ba7abd5 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/ApiRequestEnrichmentSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/ApiRequestEnrichmentSpec.scala @@ -10,27 +10,21 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry.apirequest +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry.apirequest -// Scalaz -import org.json4s.jackson.JsonMethods -import scalaz.Scalaz._ - -// json4s -import org.json4s._ -import org.json4s.jackson.parseJson - -// specs2 +import com.snowplowanalytics.iglu.client.JsonSchemaPair +import com.snowplowanalytics.iglu.client.SchemaKey import org.specs2.Specification import org.specs2.mock.Mockito import org.specs2.scalaz.ValidationMatchers +import scalaz._ +import Scalaz._ +import org.json4s._ +import org.json4s.jackson.JsonMethods +import org.json4s.jackson.parseJson -// Iglu -import com.snowplowanalytics.iglu.client.JsonSchemaPair -import com.snowplowanalytics.iglu.client.SchemaKey - -// Snowplow -import com.snowplowanalytics.snowplow.enrich.common.outputs.EnrichedEvent +import outputs.EnrichedEvent class ApiRequestEnrichmentSpec extends Specification with ValidationMatchers with Mockito { def is = s2""" diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/CacheSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/CacheSpec.scala index 97736abbd..457e78371 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/CacheSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/CacheSpec.scala @@ -10,22 +10,14 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common -package enrichments -package registry -package apirequest +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry.apirequest -// Scalaz -import scalaz._ -import Scalaz._ - -// json4s import org.json4s.JInt - -// specs2 import org.specs2.Specification import org.specs2.scalaz.ValidationMatchers import org.specs2.mock.Mockito +import scalaz._ +import Scalaz._ class CacheSpec extends Specification with ValidationMatchers with Mockito { def is = s2""" diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/HttpApiSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/HttpApiSpec.scala index edccf060c..c0652710b 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/HttpApiSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/HttpApiSpec.scala @@ -10,13 +10,9 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow.enrich -package common -package enrichments -package registry -package apirequest +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry.apirequest -// specs2 import org.specs2.Specification import org.specs2.scalaz.ValidationMatchers import org.specs2.mock.Mockito diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/InputSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/InputSpec.scala index 6b6182589..767ff2502 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/InputSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/InputSpec.scala @@ -10,25 +10,16 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich -package common -package enrichments -package registry -package apirequest +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry.apirequest -// scalaz -import scalaz._ -import Scalaz._ - -// specs2 -import org.specs2.Specification -import org.specs2.scalaz.ValidationMatchers - -// json4s import org.json4s.JObject import org.json4s.jackson.parseJson +import org.specs2.Specification +import org.specs2.scalaz.ValidationMatchers +import scalaz._ +import Scalaz._ -// This project import outputs.EnrichedEvent class InputSpec extends Specification with ValidationMatchers { @@ -218,7 +209,7 @@ class InputSpec extends Specification with ValidationMatchers { HttpApi("GET", uriTemplate, 1000, Authentication(None)), List(Output("iglu:someschema", JsonOutput("$").some)), Cache(10, 5)) - val event = new common.outputs.EnrichedEvent + val event = new outputs.EnrichedEvent event.setUser_id("chuwy") // time in true_tstamp won't be found val request = enrichment.lookup(event, Nil, Nil, Nil) diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/OutputSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/OutputSpec.scala index 3e88bceea..8f70ae0d7 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/OutputSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/apirequest/OutputSpec.scala @@ -10,17 +10,12 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow.enrich.common -package enrichments.registry -package apirequest +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry.apirequest -// specs2 -import org.specs2.Specification -import org.specs2.scalaz.ValidationMatchers - -// json4s import org.json4s.JObject import org.json4s.JsonDSL._ +import org.specs2.Specification +import org.specs2.scalaz.ValidationMatchers class OutputSpec extends Specification with ValidationMatchers { def is = s2""" diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/pii/PiiPseudonymizerEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/pii/PiiPseudonymizerEnrichmentSpec.scala index 7506a1c6f..fa28159c0 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/pii/PiiPseudonymizerEnrichmentSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/pii/PiiPseudonymizerEnrichmentSpec.scala @@ -10,42 +10,28 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow.enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package enrichments package registry package pii -// Specs2 & Scalaz-Specs2 -import org.specs2.Specification -import org.specs2.scalaz.ValidationMatchers - -// Scala -import org.json4s.jackson.JsonMethods.parse - -// Java import java.net.URI + +import com.snowplowanalytics.iglu.client.{Resolver, SchemaCriterion} +import com.snowplowanalytics.iglu.client.repositories.RepositoryRefConfig import org.joda.time.DateTime +import org.json4s.jackson.JsonMethods.parse import org.apache.commons.codec.digest.DigestUtils +import org.specs2.Specification +import org.specs2.scalaz.ValidationMatchers +import scalaz._ +import Scalaz._ -// Snowplow -import common.loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} -import common.outputs.EnrichedEvent -import common.adapters.AdapterRegistry +import loaders.{CollectorApi, CollectorContext, CollectorPayload, CollectorSource} +import outputs.EnrichedEvent +import utils.{ScalazJson4sUtils, TestResourcesRepositoryRef} +import SpecHelpers.toNameValuePairs import utils.TestResourcesRepositoryRef -import common.SpecHelpers.toNameValuePairs -import common.utils.TestResourcesRepositoryRef -import utils.ScalazJson4sUtils - -// Iglu -import iglu.client.SchemaCriterion -import iglu.client.Resolver -import iglu.client.repositories.RepositoryRefConfig -import iglu.client.validation.ValidatableJValue._ - -// Scalaz -import scalaz.Scalaz._ class PiiPseudonymizerEnrichmentSpec extends Specification with ValidationMatchers { def is = s2""" diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/InputSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/InputSpec.scala index 9ff6b08d2..a3c8a7cc1 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/InputSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/InputSpec.scala @@ -10,28 +10,18 @@ * "AS IS" BASIS, 
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich -package common -package enrichments -package registry -package sqlquery +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry.sqlquery -// Scala import scala.collection.immutable.IntMap -// scalaz -import scalaz._ -import Scalaz._ - -// specs2 -import org.specs2.Specification -import org.specs2.scalaz.ValidationMatchers - -// json4s import org.json4s.JObject import org.json4s.jackson.parseJson +import org.specs2.Specification +import org.specs2.scalaz.ValidationMatchers +import scalaz._ +import Scalaz._ -// This project import outputs.EnrichedEvent import Input._ diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/OutputSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/OutputSpec.scala index 67de7621c..4ad3da7a4 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/OutputSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/OutputSpec.scala @@ -10,20 +10,15 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common -package enrichments.registry.sqlquery - -// json4s -import org.json4s._ +package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry.sqlquery import java.sql.Date -// specs2 +import org.joda.time.DateTime +import org.json4s._ import org.specs2.Specification import org.specs2.scalaz.ValidationMatchers -import org.joda.time.DateTime - class OutputSpec extends Specification with ValidationMatchers { def is = s2""" This is a specification to test the Output of SQL Query Enrichment diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/SqlQueryEnrichmentIntegrationTest.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/SqlQueryEnrichmentIntegrationTest.scala index 78be8df91..a900e00fc 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/SqlQueryEnrichmentIntegrationTest.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/SqlQueryEnrichmentIntegrationTest.scala @@ -10,24 +10,18 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow.enrich -package common.enrichments.registry.sqlquery +package com.snowplowanalytics.snowplow.enrich.common +package enrichments.registry.sqlquery -// json4s +import com.snowplowanalytics.iglu.client.{JsonSchemaPair, SchemaKey} import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.parseJson import org.json4s.jackson.JsonMethods.asJsonNode - -// specs2 import org.specs2.Specification import org.specs2.scalaz.ValidationMatchers -// Iglu -import com.snowplowanalytics.iglu.client.{JsonSchemaPair, SchemaKey} - -// This library -import common.outputs.EnrichedEvent +import outputs.EnrichedEvent object SqlQueryEnrichmentIntegrationTest { def continuousIntegration: Boolean = sys.env.get("CI") match { @@ -46,11 +40,13 @@ object SqlQueryEnrichmentIntegrationTest { def createPair(key: SchemaKey, validJson: String): JsonSchemaPair = { val hierarchy = parseJson( s"""{"rootId":null,"rootTstamp":null,"refRoot":"events","refTree":["events","${key.name}"],"refParent":"events"}""") - (key, asJsonNode(("data", parseJson(validJson)) ~ ("hierarchy", hierarchy) ~ ("schema", key.toJValue))) + (key, asJsonNode(("data", parseJson(validJson)) ~ + (("hierarchy", hierarchy)) ~ + (("schema", key.toJValue)))) } def createDerived(key: SchemaKey, validJson: String): JObject = - ("schema", key.toSchemaUri) ~ ("data", parseJson(validJson)) + (("schema", key.toSchemaUri)) ~ (("data", parseJson(validJson))) } import SqlQueryEnrichmentIntegrationTest._ diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/SqlQueryEnrichmentSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/SqlQueryEnrichmentSpec.scala index 169965de8..e4998dac9 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/SqlQueryEnrichmentSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/registry/sqlquery/SqlQueryEnrichmentSpec.scala @@ -12,16 +12,11 @@ */ package com.snowplowanalytics.snowplow.enrich.common.enrichments.registry.sqlquery -// json4s +import com.snowplowanalytics.iglu.client.SchemaKey import org.json4s.jackson.parseJson - -// specs2 import org.specs2.Specification import org.specs2.scalaz.ValidationMatchers -// Iglu -import com.snowplowanalytics.iglu.client.SchemaKey - class SqlQueryEnrichmentSpec extends Specification with ValidationMatchers { def is = s2""" This is a specification to test the SqlQueryEnrichment configuration diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/web/ExtractPageUriSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/web/ExtractPageUriSpec.scala index 96a9f7f1b..5790faa1e 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/web/ExtractPageUriSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/web/ExtractPageUriSpec.scala @@ -10,19 +10,13 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow.enrich.common -package enrichments -package web +package com.snowplowanalytics.snowplow.enrich.common.enrichments.web -// Java import java.net.URI -// Specs2 & Scalaz-Specs2 import org.specs2.Specification import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers - -// Scalaz import scalaz._ import Scalaz._ diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/web/ParseCrossDomainSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/web/ParseCrossDomainSpec.scala index 74fc417cb..77f77f9d3 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/web/ParseCrossDomainSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/enrichments/web/ParseCrossDomainSpec.scala @@ -12,15 +12,9 @@ */ package com.snowplowanalytics.snowplow.enrich.common.enrichments.web -// Java -import java.net.URI - -// Specs2 & Scalaz-Specs2 import org.specs2.Specification import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers - -// Scalaz import scalaz._ import Scalaz._ diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/CljTomcatLoaderSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/CljTomcatLoaderSpec.scala index a174c2806..398a9a172 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/CljTomcatLoaderSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/CljTomcatLoaderSpec.scala @@ -13,24 +13,14 @@ package com.snowplowanalytics.snowplow.enrich.common package loaders -// Joda-Time import org.joda.time.DateTime - -// Scalaz -import scalaz._ -import Scalaz._ - -// Snowplow -import SpecHelpers._ - -// Specs2 import org.specs2.{ScalaCheck, Specification} import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers +import scalaz._ +import Scalaz._ -// ScalaCheck -import org.scalacheck._ -import org.scalacheck.Arbitrary._ +import SpecHelpers._ class CljTomcatLoaderSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/CloudfrontLoaderSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/CloudfrontLoaderSpec.scala index 551220a21..eaca11367 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/CloudfrontLoaderSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/CloudfrontLoaderSpec.scala @@ -13,26 +13,17 @@ package com.snowplowanalytics.snowplow.enrich.common package loaders -// Joda-Time import org.joda.time.DateTime - -// Scalaz +import org.scalacheck.Arbitrary._ +import org.specs2.{ScalaCheck, Specification} +import org.specs2.matcher.DataTables +import org.specs2.scalaz.ValidationMatchers import scalaz._ import Scalaz._ -// Snowplow import utils.ConversionUtils import SpecHelpers._ -// Specs2 -import org.specs2.{ScalaCheck, Specification} -import org.specs2.matcher.DataTables -import org.specs2.scalaz.ValidationMatchers - -// ScalaCheck -import org.scalacheck._ -import org.scalacheck.Arbitrary._ - class CloudfrontLoaderSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck 
{ def is = s2""" This is a specification to test the CloudfrontLoader functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/IpAddressExtractorSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/IpAddressExtractorSpec.scala index 97fb1afef..e9a10380a 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/IpAddressExtractorSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/IpAddressExtractorSpec.scala @@ -12,7 +12,6 @@ */ package com.snowplowanalytics.snowplow.enrich.common.loaders -// Specs2 import org.specs2.mutable.Specification import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/LoaderSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/LoaderSpec.scala index bbedb6377..7c9d2edd5 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/LoaderSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/LoaderSpec.scala @@ -13,17 +13,15 @@ package com.snowplowanalytics.snowplow.enrich.common package loaders -// Scalaz -import scalaz._ -import Scalaz._ - -// Snowplow -import SpecHelpers._ +import java.nio.charset.StandardCharsets.UTF_8 -// Specs2 import org.specs2.mutable.Specification import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers +import scalaz._ +import Scalaz._ + +import SpecHelpers._ object LoaderSpec { @@ -59,7 +57,7 @@ class LoaderSpec extends Specification with DataTables with ValidationMatchers { "extractGetPayload" should { - val Encoding = "UTF-8" + val Encoding = UTF_8 // TODO: add more tests "return a Success-boxed NonEmptyList of NameValuePairs for a valid or empty querystring" in { diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/NdjsonLoaderSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/NdjsonLoaderSpec.scala index 7b5e9414a..cba1e6c63 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/NdjsonLoaderSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/NdjsonLoaderSpec.scala @@ -14,9 +14,7 @@ package com.snowplowanalytics.snowplow.enrich.common.loaders import org.specs2.mutable.Specification import org.specs2.scalaz.ValidationMatchers - import scalaz._ -import Scalaz._ class NdjsonLoaderSpec extends Specification with ValidationMatchers { diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/ThriftLoaderSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/ThriftLoaderSpec.scala index a8c5be8eb..7046eff06 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/ThriftLoaderSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/ThriftLoaderSpec.scala @@ -10,32 +10,18 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow -package enrich -package common +package com.snowplowanalytics.snowplow.enrich.common package loaders -// Commons Codec import org.apache.commons.codec.binary.Base64 - -// Joda-Time import org.joda.time.DateTime - -// Scalaz -import scalaz._ -import Scalaz._ - -// Snowplow -import SpecHelpers._ - -// Specs2 import org.specs2.{ScalaCheck, Specification} import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers +import scalaz._ +import Scalaz._ -// ScalaCheck -import org.scalacheck._ -import org.scalacheck.Arbitrary._ +import SpecHelpers._ class ThriftLoaderSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/TsvLoaderSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/TsvLoaderSpec.scala index 58e36b57d..9e652ab1c 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/TsvLoaderSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/TsvLoaderSpec.scala @@ -10,32 +10,13 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow -package enrich -package common -package loaders +package com.snowplowanalytics.snowplow.enrich.common.loaders -// Commons Codec -import org.apache.commons.codec.binary.Base64 - -// Joda-Time -import org.joda.time.DateTime - -// Scalaz -import scalaz._ -import Scalaz._ - -// Snowplow -import SpecHelpers._ - -// Specs2 import org.specs2.{ScalaCheck, Specification} import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers - -// ScalaCheck -import org.scalacheck._ -import org.scalacheck.Arbitrary._ +import scalaz._ +import Scalaz._ class TsvLoaderSpec extends Specification with DataTables with ValidationMatchers with ScalaCheck { def is = s2""" diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/collectorPayloadSpecs.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/collectorPayloadSpecs.scala index b4760197d..e2a707c41 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/collectorPayloadSpecs.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/loaders/collectorPayloadSpecs.scala @@ -10,17 +10,8 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow.enrich.common -package loaders +package com.snowplowanalytics.snowplow.enrich.common.loaders -// Scalaz -import scalaz._ -import Scalaz._ - -// Snowplow -import SpecHelpers._ - -// Specs2 import org.specs2.mutable.Specification import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/JsonPathSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/JsonPathSpec.scala index 134b47a0b..5f22f3192 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/JsonPathSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/JsonPathSpec.scala @@ -12,13 +12,10 @@ */ package com.snowplowanalytics.snowplow.enrich.common.utils -// specs2 -import org.specs2.Specification -import org.specs2.scalaz.ValidationMatchers - -// json4s import org.json4s._ import org.json4s.jackson.parseJson +import org.specs2.Specification +import org.specs2.scalaz.ValidationMatchers class JsonPathSpec extends Specification with ValidationMatchers { def is = s2""" diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/MapTransformerSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/MapTransformerSpec.scala index d7cd44d21..5caaa6b26 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/MapTransformerSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/MapTransformerSpec.scala @@ -13,24 +13,15 @@ package com.snowplowanalytics.snowplow.enrich.common package utils -// Scala import scala.beans.BeanProperty -// Scalaz -import scalaz._ -import Scalaz._ - -// Specs2 & Scalaz-Specs2 -import org.specs2.mutable.Specification -import org.specs2.scalaz.ValidationMatchers - -// Utils import org.apache.commons.lang3.builder.ToStringBuilder import org.apache.commons.lang3.builder.HashCodeBuilder +import org.specs2.mutable.Specification +import org.specs2.scalaz.ValidationMatchers -// This project import MapTransformer._ -import enrichments.{ClientEnrichments, EventEnrichments, MiscEnrichments} +import enrichments.{ClientEnrichments, MiscEnrichments} // Test Bean final class TargetBean { diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/ScalazJson4sSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/ScalazJson4sSpec.scala index bc8284e3e..9b61b1ae2 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/ScalazJson4sSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/ScalazJson4sSpec.scala @@ -13,24 +13,11 @@ package com.snowplowanalytics.snowplow.enrich.common package utils -// Java -import java.lang.{Byte => JByte} - -// Scalaz -import scalaz._ -import Scalaz._ - -// json4s import org.json4s.jackson.JsonMethods.parse import org.json4s.DefaultFormats - -// Specs2 import org.specs2.mutable.Specification import org.specs2.scalaz.ValidationMatchers -/** - * Tests ScalazJson4sUtils - */ class JsonExtractionSpec extends Specification with ValidationMatchers { implicit val formats = DefaultFormats val testJson = parse("""{ diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/TestResourcesRepositoryRef.scala 
b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/TestResourcesRepositoryRef.scala
index 39b255672..5b24c6757 100644
--- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/TestResourcesRepositoryRef.scala
+++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/TestResourcesRepositoryRef.scala
@@ -1,27 +1,29 @@
-package com.snowplowanalytics
-package snowplow.enrich.common.utils
+/*
+ * Copyright (c) 2014-2019 Snowplow Analytics Ltd. All rights reserved.
+ *
+ * This program is licensed to you under the Apache License Version 2.0,
+ * and you may not use this file except in compliance with the Apache License Version 2.0.
+ * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the Apache License Version 2.0 is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
+ */
+package com.snowplowanalytics.snowplow.enrich.common.utils
-// Java
 import java.io.IOException
-// Scala
 import scala.util.control.NonFatal
-// Jackson
 import com.fasterxml.jackson.core.JsonParseException
 import com.fasterxml.jackson.databind.JsonNode
 import com.github.fge.jackson.JsonLoader
-
-// Scalaz
-import scalaz.Scalaz._
-
-//Snowplow
+import com.snowplowanalytics.iglu.client.{SchemaKey, Validated}
 import com.snowplowanalytics.iglu.client.repositories.{RepositoryRef, RepositoryRefConfig}
-import com.snowplowanalytics.iglu.client.{SchemaKey, Validated, utils, validation}
-
-// Iglu
-import iglu.client.utils.{ValidationExceptions => VE}
-import iglu.client.validation.ProcessingMessageMethods._
+import com.snowplowanalytics.iglu.client.utils.{ValidationExceptions => VE}
+import com.snowplowanalytics.iglu.client.validation.ProcessingMessageMethods._
+import scalaz.Scalaz._
 /**
 * Iglu repository ref that looks up a schema in test resources.
diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/ValidateAndReformatJsonSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/ValidateAndReformatJsonSpec.scala
index 6a2fe931f..58db4154b 100644
--- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/ValidateAndReformatJsonSpec.scala
+++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/ValidateAndReformatJsonSpec.scala
@@ -10,17 +10,8 @@
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/ -package com.snowplowanalytics.snowplow.enrich.common -package utils +package com.snowplowanalytics.snowplow.enrich.common.utils -// Java -import java.lang.{Byte => JByte} - -// Scalaz -import scalaz._ -import Scalaz._ - -// Specs2 import org.specs2.Specification import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/conversionUtilsSpecs.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/conversionUtilsSpecs.scala index f93167d32..165712a6d 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/conversionUtilsSpecs.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/conversionUtilsSpecs.scala @@ -10,21 +10,17 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics.snowplow.enrich.common -package utils +package com.snowplowanalytics.snowplow.enrich.common.utils -// Java import java.net.URI -// Scalaz -import scalaz._ -import Scalaz._ - -// Specs2 +import org.scalacheck.Arbitrary._ import org.specs2.{ScalaCheck, Specification} import org.specs2.mutable.{Specification => MutableSpecification} import org.specs2.matcher.DataTables import org.specs2.scalaz.ValidationMatchers +import scalaz._ +import Scalaz._ class StringToUriSpec extends MutableSpecification with ValidationMatchers { diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/shredder/ShredderSpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/shredder/ShredderSpec.scala index b7c2a65e3..e7ab1d4e7 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/shredder/ShredderSpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/shredder/ShredderSpec.scala @@ -10,19 +10,13 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ -package com.snowplowanalytics -package snowplow -package enrich -package common -package utils -package shredder - -// Snowplow Common Enrich -import outputs.EnrichedEvent +package com.snowplowanalytics.snowplow.enrich.common +package utils.shredder -// Specs2 import org.specs2.Specification +import outputs.EnrichedEvent + class ShredderSpec extends Specification { def is = s2""" This is a specification to test the Shredder functionality diff --git a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/shredder/TypeHierarchySpec.scala b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/shredder/TypeHierarchySpec.scala index 499f8d6be..c5481e94b 100644 --- a/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/shredder/TypeHierarchySpec.scala +++ b/modules/common/src/test/scala/com.snowplowanalytics.snowplow.enrich.common/utils/shredder/TypeHierarchySpec.scala @@ -10,12 +10,8 @@ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. 
*/ -package com.snowplowanalytics.snowplow.enrich -package common -package utils -package shredder +package com.snowplowanalytics.snowplow.enrich.common.utils.shredder -// Specs2 import org.specs2.Specification class TypeHierarchySpec extends Specification { diff --git a/project/CommonSettings.scala b/project/CommonSettings.scala index 8f500283b..03b454200 100644 --- a/project/CommonSettings.scala +++ b/project/CommonSettings.scala @@ -32,27 +32,10 @@ object BuildSettings { lazy val basicSettings = Seq( organization := "com.snowplowanalytics", scalaVersion := "2.11.11", - scalacOptions := compilerOptions, - scalacOptions in Test := Seq("-Yrangepos"), javacOptions := javaCompilerOptions, resolvers ++= Dependencies.resolutionRepos ) - lazy val compilerOptions = Seq( - "-deprecation", - "-encoding", "UTF-8", - "-feature", - "-language:existentials", - "-language:higherKinds", - "-language:implicitConversions", - "-unchecked", - "-Yno-adapted-args", - "-Ywarn-dead-code", - "-Ywarn-numeric-widen", - "-Xfuture", - "-Xlint" - ) - lazy val javaCompilerOptions = Seq( "-source", "1.8", "-target", "1.8"