Modify container tag to opnfv in spark-submit 53/64453/1
author: earrage <eddie.arrage@huawei.com>
Sat, 3 Nov 2018 01:26:46 +0000 (18:26 -0700)
committer: earrage <eddie.arrage@huawei.com>
Sat, 3 Nov 2018 01:31:31 +0000 (18:31 -0700)
- Change tag from local repo to opnfv for clover-spark
container, which spark-submit references
- Move update of configured visibility services into
loop

Change-Id: I9d09a851977c6006ea2cbba45d7a28be6a4660b9
Signed-off-by: earrage <eddie.arrage@huawei.com>
clover/spark/docker/clover-spark/build.sh
clover/spark/docker/spark-submit/runner.sh
clover/spark/docker/spark-submit/runner_fast.sh
clover/spark/src/main/scala/CloverSlow.scala

index a1a8788..d139b35 100755 (executable)
@@ -7,7 +7,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 
-IMAGE_PATH=${IMAGE_PATH:-"kube1-node1:5000"}
+IMAGE_PATH=${IMAGE_PATH:-"localhost:5000"}
 IMAGE_NAME=${IMAGE_NAME:-"clover-spark:latest"}
 
 # Copy clover-spark jar first
index b98ff32..5de3824 100755 (executable)
@@ -12,7 +12,7 @@
 ./runner_fast.sh &
 
 IMAGE_NAME=${IMAGE_NAME:-"clover-spark:latest"}
-IMAGE_PATH=${IMAGE_PATH:-"localhost:5000"}
+IMAGE_PATH=${IMAGE_PATH:-"opnfv"}
 CLASS_NAME=${CLASS_NAME:-"CloverSlow"}
 JAR_NAME=${JAR_NAME:-"clover-spark_2.11-1.0.jar"}
 
index 2381351..0a387b2 100755 (executable)
@@ -9,7 +9,7 @@
 #
 
 IMAGE_NAME=${IMAGE_NAME:-"clover-spark:latest"}
-IMAGE_PATH=${IMAGE_PATH:-"localhost:5000"}
+IMAGE_PATH=${IMAGE_PATH:-"opnfv"}
 CLASS_NAME=${CLASS_NAME:-"CloverFast"}
 JAR_NAME=${JAR_NAME:-"clover-spark_2.11-1.0.jar"}
 
index 1866d72..c389967 100644 (file)
@@ -42,38 +42,38 @@ object CloverSlow {
     .config("spark.cassandra.connection.port", "9042")
     .getOrCreate()
 
-    val services = redis.smembers("visibility_services")
-
     spark
     .read.cassandraFormat("spans", "visibility")
     .load()
     .createOrReplaceTempView("curspans")
 
-    if (distinct_url_service) {
-        // Get number of distinct URLs per service (node_id)
-        for (s <- services.get) {
-            val service = s.get
-            val perurl = spark.sql(
-            s"""
-                |SELECT node_id,count(distinct http_url)
-                |as urls,collect_set(http_url) as values
-                |FROM curspans
-                |WHERE node_id LIKE '%$service%'
-                |GROUP BY node_id
-            """.stripMargin)
-            for ((row) <- perurl.collect) {
-                println(row)
-                val node_id = row.get(0)
-                val url_count  = row.get(1)
-                val url_distinct  = row.getList(2).toString
-                redis.hmset(service, Map("node_id" -> node_id,
-                                         "url_count" -> url_count,
-                                         "url_distinct" -> url_distinct))
+    for(  x <- 1 to 500 ) {
+
+        val services = redis.smembers("visibility_services")
+
+        if (distinct_url_service) {
+            // Get number of distinct URLs per service (node_id)
+            for (s <- services.get) {
+                val service = s.get
+                val perurl = spark.sql(
+                s"""
+                    |SELECT node_id,count(distinct http_url)
+                    |as urls,collect_set(http_url) as values
+                    |FROM curspans
+                    |WHERE node_id LIKE '%$service%'
+                    |GROUP BY node_id
+                """.stripMargin)
+                for ((row) <- perurl.collect) {
+                    println(row)
+                    val node_id = row.get(0)
+                    val url_count  = row.get(1)
+                    val url_distinct  = row.getList(2).toString
+                    redis.hmset(service, Map("node_id" -> node_id,
+                                             "url_count" -> url_count,
+                                             "url_distinct" -> url_distinct))
+                }
             }
         }
-    }
-
-    for(  x <- 1 to 500 ) {
 
         if (response_times) {
             try {