Added partial spatial_resource_waste results

Claudio Maggioni 2021-03-10 16:03:54 +00:00
parent 3599731bee
commit ab43fc752f
11 changed files with 100 additions and 2 deletions

.gitignore

@@ -1 +1,2 @@
**/.ipynb_checkpoints/
task_slowdown/?_state_changes.json.gz


@@ -0,0 +1 @@
{"ram-7":1127244247782832927155,"cpu-None":870398237374763975,"cpu-4":72742419206559169141,"cpu-6":2607089896396096,"cpu-5":1495031974299445237,"ram-6":1771756194157336,"ram-8":6333363246670093671,"ram-None":2278909955299490338,"cpu-8":10453981314265400643,"ram-4":39996068595477816843,"cpu-7":2477266899831924821150,"ram-5":820784715799101816}


@@ -0,0 +1 @@
{"ram-7":12179141128046836944,"cpu-7":34597851715881461865,"cpu-4":1277842979019967,"cpu-None":35533494863090,"cpu-8":3374885680509406402,"ram-4":668293322041128,"ram-6":2511524364624386,"cpu-6":4821872805818277,"ram-5":706697505000608,"cpu-5":1162463756523225,"ram-None":27186835739992,"ram-8":2049278981402678298}


@@ -0,0 +1 @@
{"ram-5":179035340732121,"cpu-4":2270526535924296,"ram-8":2487571826630012614,"ram-6":2086009261347319,"ram-None":27168309709802,"cpu-7":21730608701477619298,"cpu-None":27583972605824,"cpu-5":332120744531192,"ram-4":857733836230531,"ram-7":9777641422713517228,"cpu-6":3964312633427001,"cpu-8":4608310732727489575}


@@ -0,0 +1 @@
{"cpu-7":3219116999150169319,"ram-6":1647710699476883,"ram-8":260364672579256213,"ram-4":806709546640509,"cpu-8":147212088471680302,"cpu-None":31971457860286,"cpu-5":799246418531183,"ram-7":12436225750822013784,"ram-5":351712157688860,"cpu-4":1552997044318409,"cpu-6":3215224470926967,"ram-None":16263229321240}


@@ -0,0 +1 @@
{"ram-5":66503777641532,"cpu-6":4495142257328038,"ram-None":23121843616336,"ram-4":450975658032824,"ram-6":1839364680294388,"ram-8":48132672437,"cpu-8":168275090998,"cpu-4":973354075958514,"cpu-None":40596607654538,"cpu-7":263169443298146034,"ram-7":137990934175287517,"cpu-5":138098915874971}


@@ -0,0 +1 @@
{"ram-None":23826458848957,"cpu-None":37871271012381,"ram-7":7034694158105661427,"ram-8":758962327811038229,"ram-5":160890376148062,"ram-6":1695862184893255,"cpu-8":1859988166894731392,"ram-4":1049853920898815,"cpu-5":304290269227378,"cpu-4":2661349481170749,"cpu-6":4430941512936864,"cpu-7":31458868334932353789}


@@ -0,0 +1 @@
{"ram-None":13356593108037,"ram-7":552422233016770826,"cpu-7":602467429151176297,"ram-5":69915734990220,"cpu-8":1593431879738519966,"ram-6":2974209604217382,"cpu-6":3875133604782068,"cpu-5":96483209833658,"ram-4":451120923520151,"cpu-4":749109000083736,"ram-8":1228709892211319129,"cpu-None":28232654256464}


@@ -0,0 +1,91 @@
#!/usr/bin/env python3
# coding: utf-8

import json
import pandas
from IPython import display

import findspark
findspark.init()

import pyspark
import pyspark.sql
import sys
import gzip
from pyspark import AccumulatorParam
from pyspark.sql.functions import lit
from pyspark.sql import Window
from pyspark.sql.types import ByteType

# Cluster name is passed as the first command-line argument.
cluster = sys.argv[1]

spark = pyspark.sql.SparkSession.builder \
    .appName("task_slowdown") \
    .config("spark.driver.maxResultSize", "32g") \
    .config("spark.local.dir", "/run/tmpfiles.d/spark") \
    .config("spark.driver.memory", "75g") \
    .getOrCreate()
sc = spark.sparkContext

df = spark.read.json("/home/claudio/google_2019/instance_events/" + cluster + "/" + cluster + "_instance_events*.json.gz")
#df = spark.read.json("/home/claudio/google_2019/instance_events/" + cluster + "/" + cluster + "_test.json")

# Try to cast collection_type to a byte; if the column is missing (or the
# assignment fails), add an all-null byte column instead.
try:
    df["collection_type"] = df["collection_type"].cast(ByteType())
except:
    df = df.withColumn("collection_type", lit(None).cast(ByteType()))

# (previous type, current type) event pairs between which a task counts as
# running: the first element is 3 or 10, the second is the next event seen.
RUN = set([(3, 1), (3, 4), (3, 5), (3, 6), (3, 7), (3, 8), (3, 10),
           (10, 1), (10, 4), (10, 5), (10, 6), (10, 7), (10, 8), (10, 10)])


def is_res_none(tres):
    return tres is None or tres["cpus"] is None or tres["memory"] is None


def for_each_task(ts):
    # Walk the events of one task in chronological order and integrate the
    # requested CPU and RAM over the time intervals in which it was running.
    ts = sorted(ts, key=lambda x: x["time"])
    last_term = None       # type of the last termination event (4 to 8)
    last_resources = None  # last known non-null resource request
    prev = None
    cpu = 0
    ram = 0
    for i, t in enumerate(ts):
        if t["type"] >= 4 and t["type"] <= 8:
            last_term = t["type"]
        if prev is not None:
            if (prev["type"], t["type"]) in RUN:
                if is_res_none(last_resources):
                    last_resources = t["res"]
                if not is_res_none(last_resources):
                    delta = t["time"] - prev["time"]
                    cpu += round(delta * last_resources["cpus"])
                    ram += round(delta * last_resources["memory"])
        prev = t
        if not is_res_none(last_resources):
            last_resources = t["res"]
    return [("cpu-" + str(last_term), cpu), ("ram-" + str(last_term), ram)]


def cleanup(x):
    # Keep only the fields needed downstream, normalizing null event types to 0.
    return {
        "time": int(x.time),
        "type": 0 if x.type is None else int(x.type),
        "id": x.collection_id + "-" + x.instance_index,
        "res": x.resource_request
    }


# Keep task-level events (collection_type 0 or null) with all required fields,
# group them by task and sum the per-task totals by termination type.
df2 = df.rdd \
    .filter(lambda x: x.collection_type is None or x.collection_type == 0) \
    .filter(lambda x: x.time is not None and x.instance_index is not None and x.collection_id is not None) \
    .map(cleanup) \
    .groupBy(lambda x: x["id"]) \
    .mapValues(for_each_task) \
    .flatMap(lambda x: x[1]) \
    .groupBy(lambda x: x[0]) \
    .mapValues(lambda xs: sum(n for _, n in xs)) \
    .collect()

result = {}
for pair in df2:
    result[pair[0]] = pair[1]

with open(cluster + "_res_micros_requested.json", "w") as out:
    json.dump(result, out, separators=(',', ':'))
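
For reference, a minimal sketch of how `for_each_task` accrues resource-time, meant to be run in the same session as (or appended to) the script above. The field names and the RUN transition codes follow the script; the timestamps and resource requests are invented, and types 3 and 5 are assumed to be the trace's SCHEDULE and FAIL codes.

toy_events = [
    {"time": 1_000_000, "type": 3, "res": {"cpus": 0.5, "memory": 0.25}},  # assumed SCHEDULE
    {"time": 4_000_000, "type": 5, "res": {"cpus": 0.5, "memory": 0.25}},  # assumed FAIL (terminal)
]
# The (3, 5) transition is in RUN, so the 3_000_000 time units between the two
# events are charged against the requested resources:
#   cpu = round(3_000_000 * 0.5)  = 1_500_000
#   ram = round(3_000_000 * 0.25) =   750_000
print(for_each_task(toy_events))  # [('cpu-5', 1500000), ('ram-5', 750000)]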


@@ -1 +0,0 @@
task_slowdown/?_state_changes.json.gz


@@ -38,7 +38,7 @@ Google drive.
- (%) total wasted time per unsuccessful event type
- (mins.) avg. wasted time per number of events for each event type
- breakdown of wasted time per *submission*, *scheduling*, *queue*
- *III-A-I: Average slowdown per task*: (Table II)
- [✅ **task_slowdown**] *III-A-I: Average slowdown per task*: (Table II)
For FINISH type tasks, compute the *slowdown*, i.e. the mean (**ask Rosa**) of all
*response times* for each task event over the *response time* of the last event (which
is by def. FINISH). Response time is defined as *Queue time* + *Exec time*
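
One possible reading of the slowdown definition above (the note itself flags the exact averaging as an open question, hence "ask Rosa"), sketched in plain Python on invented numbers; each response time is *Queue time* + *Exec time*, and the last entry belongs to the FINISH event:

def slowdown(response_times):
    # response_times: per-event response times of one task, in chronological
    # order; the last one is by definition the FINISH event's response time.
    final = response_times[-1]
    return sum(r / final for r in response_times) / len(response_times)

# Invented example: three events with response times of 10, 12 and 8 minutes.
print(slowdown([10, 12, 8]))  # (10/8 + 12/8 + 8/8) / 3 = 1.25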