#
# Collective Knowledge: crowdbenchmarking using ARM's workload automation and CK
#
# See CK LICENSE.txt for licensing details
# See CK COPYRIGHT.txt for copyright details
#
# Developer: Grigori Fursin, Grigori.Fursin@cTuning.org, http://fursin.net
#

# NOTE(review): this preamble had been collapsed onto a single commented-out
# physical line during extraction, which left cfg/work/ck and all settings
# undefined; original line structure restored below (tokens unchanged).

cfg={}   # Will be updated by CK (meta description of this module)
work={}  # Will be updated by CK (temporal data)
ck=None  # Will be updated by CK (initialized CK kernel)

# Local settings

compiler_choices='#choices#compiler_flags#'

line='================================================================'

fsummary='summary.json'
fclassification='classification.json'

fgraph='tmp-reactions-graph.json'
ffstat='ck-stat-flat-characteristics.json'

form_name='wa_web_form'
onchange='document.'+form_name+'.submit();'

hextra=''
# NOTE(review): the commented-out append below lost its string content (HTML
# markup stripped during extraction) — restore from upstream CK module if needed.
#hextra+='
Debug time (CK query): '+str(time.time()-dt)+' sec.
\n' lst=r['lst'] # Find unique variables dt=time.time() choices={} mchoices={} # cache of UID -> alias choices wchoices={} cache_meta={} for q in lst: d=q['meta'] meta=d.get('meta',{}) # Process some derivatives scenario=meta.get('crowd_uid','') kscenario=cfg['module_deps']['experiment.scenario.mobile']+':'+scenario if kscenario not in cache_meta: r=ck.access({'action':'load', 'module_uoa':cfg['module_deps']['experiment.scenario.mobile'], 'data_uoa':scenario}) if r['return']>0: return r xd=r['dict'] # Find model size for q in xd.get('files',[]): if q.get('model_weights','')=='yes': xd['model_weights_size']=int(q.get('file_size',0)/1E6) break cache_meta[kscenario]=xd else: xd=cache_meta[kscenario] if meta.get('engine','')=='': meta['engine']=xd.get('engine','') if meta.get('model','')=='': meta['model']=xd.get('model','') meta['engine_meta']=xd.get('engine_meta',{}) # Process selector meta for kk in selector: kx=kk['key'] k=ckey+kx if k not in choices: choices[k]=[] wchoices[k]=[{'name':'','value':''}] v=meta.get(kx,'') if v!='': if v not in choices[k]: choices[k].append(v) muoa=kk.get('module_uoa','') vv=v if muoa!='': if k not in mchoices: mchoices[k]={} vv=mchoices[k].get(v,'') if vv=='': r=ck.access({'action':'load', 'module_uoa':muoa, 'data_uoa':v}) if r['return']==0: mk=kk.get('module_key','') if mk=='': mk='##data_name' rx=ck.get_by_flat_key({'dict':r, 'key':mk}) if rx['return']>0: return rx vv=rx['value'] if vv=='' or vv==None: vv=v mchoices[k][v]=vv wchoices[k].append({'name':vv, 'value':v}) if debug: h+='\n
Debug time (CK find unique vars): '+str(time.time()-dt)+' sec.
\n'
# Prepare query div ***************************************************************
dt=time.time()
if cmuoa=='':
# Start form + URL (even when viewing entry)
r=ck.access({'action':'start_form',
'module_uoa':cfg['module_deps']['wfe'],
'url':url1,
'name':form_name})
if r['return']>0: return r
h+=r['html']
for kk in selector:
k=ckey+kk['key']
n=kk['name']
nl=kk.get('new_line','')
if nl=='yes':
h+='
\n
Debug time (prepare selector): '+str(time.time()-dt)+' sec.
\n'
# Check hidden
if hi_uid!='':
h+='\n'
if hi_user!='':
h+='\n'
h+='
'
# Prune list ******************************************************************
# NOTE(review): indentation of this section was stripped during extraction;
# nesting reconstructed from the statement logic (tokens unchanged). It filters
# experiment entries by the user-selected key/value pairs and flattens each
# raw result into one 'extra' record per image size.
dt=time.time()

plst=[]
for q in lst:
    d=q['meta']
    meta=d.get('meta',{})

    # Check selector: drop entry if any selected key/value doesn't match
    skip=False
    for kk in selector:
        k=kk['key']
        n=kk['name']
        v=kk.get('value','')

        if v!='' and meta.get(k,'')!=v:
            skip=True
            break # FGG added later - should be correct

    if not skip:
        # Process raw results
        arr=d.get('all_raw_results',[])

        for g in arr:
            # One cloned entry per raw result (so each row can carry its own 'extra')
            nn=copy.deepcopy(q)

            ih=g.get('image_height',0)
            iw=g.get('image_width',0)

            it=0
            if ih!=0 and iw!=0:
                it=ih*iw
            # NOTE(review): 'key' is always needed by nn['extra'] below, so it is
            # placed outside the ih/iw guard — confirm against upstream source.
            key=str(ih)+' x '+str(iw)

            # Keep only the second line of the prediction text (top-1 label)
            prd=g.get('prediction','')
            if prd!='':
                j1=prd.find('\n')
                if j1>0:
                    j2=prd.find('\n',j1+1)
                    if j2>0:
                        prd=prd[j1:j2]

            # Check timing - currently temporal ugly hack (ms -> s)
            t=g.get('time',[])
            tmin=0
            tmax=0
            if len(t)>0:
                tmin=min(t)/1E3
                tmax=max(t)/1E3

            nn['extra']={'key':key, 'raw_results':g, 'time_min':tmin, 'time_max':tmax, 'prediction':prd}

            # Check xOpenME timing: per-counter min/max across repetitions
            xopenme=g.get('xopenme',{})
            xxopenme={}
            if type(xopenme)==dict:
                for xk in xopenme:
                    t=xopenme[xk]
                    tmin=0
                    tmax=0
                    if len(t)>0:
                        tmin=min(t)
                        tmax=max(t)
                    xxopenme['xopenme_'+xk+'_min']=tmin
                    xxopenme['xopenme_'+xk+'_max']=tmax

            nn['extra'].update(xxopenme)
            plst.append(nn)
if debug: h+='\n
Debug time (prune entries by user selection): '+str(time.time()-dt)+' sec.
\n' # Add extra selectors h+='
\n' # Sort first before prunning dt=time.time() if v9a=='throughput': splst=sorted(plst, key=lambda x: x.get('extra',{}).get('xopenme_execution_time_kernel_2_min',0)) else: splst=sorted(plst, key=lambda x: x.get('extra',{}).get('time_min',0)) if debug: h+='\n
Debug time (sorting table): '+str(time.time()-dt)+' sec.
\n' # Demo graph bgraph={'0':[]} # Just for graph demo if hi_uid!='' or hi_user!='': bgraph['1']=[] # execution time vs cost bgraph2={'0':[]} igraph2={'0':[]} # execution time vs cost bgraph3={'0':[]} igraph3={'0':[]} f_bgraph3={} f_igraph3={} # Check if too many lplst=len(plst) min_view=False view_all=i.get('all','') if view_all!='': h+='\n' # Advertisement h+='
# | \n' h+='Platform | \n' h+='Crowd scenario | \n' if not min_view: h+='Versions | \n' h+='Model weight size | \n' x='' if v9a=='latency': x=' style="color:#7f0000"' h+='Total time (min/max sec.) * LATENCY * | \n'
if not min_view:
h+=' Init network time (min/max sec.) | \n' h+='Image preparation (min/max sec.) | \n' x='' if v9a=='throughput': x=' style="color:#7f0000"' h+='Classification time (min/max sec.) * THROUGHPUT * | \n'
h+=' Prediction probability | \n' h+='Power consumption (W) min / max | \n'
h+=' Memory usage (MB) | \n' h+='Model accuracy on ImageNet | \n' h+='Model topology and parameters | \n' h+='HW costs | \n' h+='All usage costs (preparation, training, inference, errors, etc) | \n' if not min_view: h+='Mispredictions and unexpected behavior | \n' h+='Image features | \n' h+='CPU | \n' if not min_view: h+='CPU ABI | \n' h+='GPU | \n' h+='OS | \n' h+='Data UID / Behavior UID | \n' if not min_view: h+='User | \n' h+='
'+str(ix)+' | \n' x=plat_name if plat_uid!='': x=''+x+'' h+=''+x+' | \n' # Output scenario xx=mchoices.get(ckey+'crowd_uid',{}).get(scenario,'') kscenario=cfg['module_deps']['experiment.scenario.mobile']+':'+scenario xd=cache_meta[kscenario] xx=xd.get('title','') model_weights_size=int(xd.get('model_weights_size',0))+1 fbsize=xd.get('features',{}).get('fixed_batch_size','') h+=''+xx+' | \n' # Versions if not min_view: ver='' dver=meta.get('engine_meta',{}).get(cpu_abi,{}) ver+='main: '+str(dver.get('program_version',''))+'\n' dps=dver.get('deps_versions',{}) for dx in dps: ver+=dx+': '+str(dps[dx].get('version',''))+'\n' ver=ver.replace("\'","'").replace("'","\\'").replace('\"','"').replace('"',"\\'").replace('\n','\\n') if ver!='': ver='' h+=''+ver+' | \n' # Model weight size h+=''+str(model_weights_size)+' MB | \n' # Check relative time xx=''+('%.3f'%tmin)+' / '+('%.3f'%tmax) if tmin==0: xx+=''+xx+' | \n' # Finer grain timing ttmin=tmin ttdelta=tmax-tmin if not min_view: for ixo in range(0,3): tmin=extra.get('xopenme_execution_time_kernel_'+str(ixo)+'_min',0) tmax=extra.get('xopenme_execution_time_kernel_'+str(ixo)+'_max',0) if tmin<0: tmin=0 # detected bug xx=''+('%.3f'%tmin)+' / '+('%.3f'%tmax) if tmin==0 and ixo!=1: xx+=''+xx+' | \n' # Accuracy x=pred j=x.find('-') if j>0: x=x[:j-1].strip() h+=''+x+' | \n' # Get info about platform hd={} if plat_uid!='': rh=ck.access({'action':'load', 'module_uoa':cfg['module_deps']['platform'], 'data_uoa':plat_uid}) if rh['return']==0: hd=rh['dict'] # Energy TBD x='-' if len(hd)>0: power=hd.get('features',{}).get('power_consumption',{}) if len(power)>0: pmin=power.get('min','') pmax=power.get('max','') x=str(pmin)+' / '+str(pmax) h+=''+x+' | \n' # Memory usage TBD h+='- | \n' # Accuracy (take from model info) acc=xd.get('features',{}).get('accuracy','') acc5=xd.get('features',{}).get('accuracy_top5','') x=str(acc) if acc5!='': x+=' / '+str(acc5) if x=='': x='-' h+=''+x+' | \n' # Will be used to optimize model topology and 
parameters x='default' xfiles=xd.get('files',[]) for xf in xfiles: if xf.get('filename','')=='deploy.prototxt': xx1=xf.get('from_data_uoa','') if xx1=='': xx1=scenario xx2=xf.get('path','') x='deploy.prototxt\n' break h+=''+x+' | \n' # Cost (take from platform meta) hc='-' last_cost=0 if len(hd)>0: costs=hd.get('features',{}).get('cost',[]) hc='' if len(costs)>0: last_cost=int(costs[0].get('price','0')) for c in costs: if hc!='': hc+=''+hc+' | \n' # TBD: all other costs h+='\n' # Mispredictions and unexpected behavior if not min_view: x='' for q in mp: ca=q.get('correct_answer','') mi=q.get('mispredicted_image','') mr=q.get('misprediction_results','') if mr!='': j1=mr.find('\n') if j1>0: j2=mr.find('\n',j1+1) if j2>0: mr=mr[j1:j2] xx=ca if mi!='': y=work['self_module_uid'] if cmuoa!='': y=cmuoa url=url0+'action=pull&common_action=yes&cid='+y+':'+duid+'&filename='+mi if ca=='': ca='unknown' xx=''+ca+'' if x!='': x+=' | '+x+' | \n' # All images h+=''+key.replace(' ',' ')+' | \n' # Extra info about platform x=cpu_name if cpu_uid!='': x=''+x+'' h+=''+x+' | \n' # CPU ABI if not min_view: h+=''+cpu_abi+' | \n' x=gpu_name if gpu_uid!='': x=''+x+'' h+=''+x+' | \n' x=os_name if os_uid!='': x=''+x+'' h+=''+x+' | \n' # Data x=work['self_module_uid'] if cmuoa!='': x=cmuoa raw_data_url=url0+'wcid='+x+':'+duid h+=''+duid+' '+buid+' | \n' # User if not min_view: h+=''+user+' | \n' h+='||
Debug time (preparing html of a table): '+str(time.time()-dt)+' sec.
\n'
if cmuoa=='':
h+='\n'
# 1st graph
if len(bgraph['0'])>0:
dt=time.time()
ii={'action':'plot',
'module_uoa':cfg['module_deps']['graph'],
"table":bgraph,
"ymin":0,
"ignore_point_if_none":"yes",
"plot_type":"d3_2d_bars",
"display_y_error_bar":"no",
"title":"Powered by Collective Knowledge",
"x_ticks_period":50,
"axis_x_desc":"Experiment",
"axis_y_desc":"DNN image classification time (s)",
"plot_grid":"yes",
"d3_div":"ck_interactive",
"image_width":"900",
"image_height":"400",
"wfe_url":url0}
# r=ck.access(ii)
# if r['return']==0:
# x=r.get('html','')
# if x!='':
# st+=r.get('style','')
#
# h+='
\n'
# h+='
\n
\n' if debug: hhh+='\n
Debug time (preparing graph): '+str(time.time()-dt)+' sec.
\n' # Adding table hhh+=h rr={'return':0, 'style':st} if minimal: rr['graph_html']=graph_html rr['style']=graph_style rr['table']=bgraph3 rr['table_info']=igraph3 else: rr['html']=hhh return rr ############################################################################## # process raw results from mobile devices def process(i): """ Input: { } Output: { return - return code = 0, if successful > 0, if error (error) - error text if return > 0 } """ import copy # Debug # ck.save_json_to_file({'json_file':'/tmp/xyz1.json','dict':i}) crowd_uid=i.get('crowd_uid','') email=i.get('email','') raw_results=i.get('raw_results',{}) cfb=i.get('cpu_freqs_before',{}) cfa=i.get('cpu_freqs_after',{}) features=i.get('platform_features',{}) fplat=features.get('platform',{}) fos=features.get('os',{}) fcpu=features.get('cpu',{}) fgpu=features.get('gpu',{}) plat_name=fplat.get('name','') plat_uid=features.get('platform_uid','') os_name=fos.get('name','') os_uid=features.get('os_uid','') gpu_uid=features.get('gpu_uid','') cpu_name=fcpu.get('name','') cpu_abi=fcpu.get('cpu_abi','') if cpu_name=='' and cpu_abi!='': cpu_name='unknown-'+fcpu.get('cpu_abi','') cpu_uid=features.get('cpu_uid','') gpu_name=fgpu.get('name','') gpgpu_name='' gpgpu_uid='' sn=fos.get('serial_number','') # Prepare high-level experiment meta meta={'cpu_name':cpu_name, 'cpu_abi':cpu_abi, 'os_name':os_name, 'plat_name':plat_name, 'gpu_name':gpu_name, 'gpgpu_name':gpgpu_name, 'crowd_uid':crowd_uid} # Load scenario and update meta r=ck.access({'action':'load', 'module_uoa':cfg['module_deps']['experiment.scenario.mobile'], 'data_uoa':crowd_uid}) if r['return']>0: return r xd=r['dict'] meta['engine']=xd.get('engine','') meta['model']=xd.get('model','') mmeta=copy.deepcopy(meta) # Extra meta which is not used to search similar case ... 
mmeta['platform_uid']=plat_uid mmeta['os_uid']=os_uid mmeta['cpu_uid']=cpu_uid mmeta['gpu_uid']=gpu_uid mmeta['gpgpu_uid']=gpgpu_uid # Generate behavior UID rx=ck.gen_uid({}) if rx['return']>0: return rx buid=rx['data_uid'] raw_results['user']=email raw_results['behavior_uid']=buid # Check if already exists duid='' ddd={} ii={'action':'search', 'module_uoa':work['self_module_uid'], 'repo_uoa':ck.cfg.get('record_local_repo_uoa',''), 'search_dict':{'meta':meta}, 'add_meta':'yes'} rx=ck.access(ii) if rx['return']>0: return rx lst=rx['lst'] if len(lst)==1: duid=lst[0]['data_uid'] ddd=lst[0]['meta'] else: rx=ck.gen_uid({}) if rx['return']>0: return rx duid=rx['data_uid'] # We keep time1,2,3 just for compatibility with the first beta version t=raw_results.get('time',[]) tx=raw_results.get('time1',None) if tx!=None: t.append(tx) tx=raw_results.get('time2',None) if tx!=None: t.append(tx) tx=raw_results.get('time3',None) if tx!=None: t.append(tx) raw_results['time']=t # Check XOpenME xopenme={} yopenme=raw_results.get('xopenme',[]) for xx in yopenme: for k in xx: v=xx[k] if k not in xopenme: xopenme[k]=[] if v!=None: xopenme[k].append(v) raw_results['xopenme']=xopenme # Record freq (not checking if changed at this stage) raw_results['cpu_freqs_before']=[cfb] raw_results['cpu_freqs_after']=[cfa] # Process freq before and freq after (for now no any intelligence) fb=raw_results # Process results results=ddd.get('all_raw_results',[]) # Check if already exists with this image topology found=False for q in results: if (q.get('image_height',None)==raw_results.get('image_height',None) and \ q.get('image_width',None)==raw_results.get('image_width',None)) or \ (q.get('image_height',None)==raw_results.get('image_width',None) and \ q.get('image_width',None)==raw_results.get('image_height',None)): t=q.get('time',[]) for tx in raw_results.get('time',[]): t.append(tx) q['time']=t tkk1=raw_results.get('xopenme',{}) tkk2=q.get('xopenme',{}) for tk in tkk1: if tk not in tkk2: tkk2[tk]=[] for 
tx in tkk1[tk]: if tx!=None: tkk2[tk].append(tx) q['xopenme']=tkk2 fb=q.get('cpu_freqs_before',[]) fb.append(cfb) q['cpu_freqs_before']=fb fa=q.get('cpu_freqs_after',[]) fa.append(cfa) q['cpu_freqs_before']=fa buid=q.get('behavior_uid','') found=True break if not found: results.append(raw_results) ddd['all_raw_results']=results xmeta=ddd.get('meta',{}) xmeta.update(mmeta) ddd['meta']=xmeta # Update meta rx=ck.access({'action':'update', 'module_uoa':work['self_module_uid'], 'data_uoa':duid, 'repo_uoa':ck.cfg.get('record_local_repo_uoa',''), 'dict':ddd, 'substitute':'yes', 'sort_keys':'yes'}) if rx['return']>0: return rx # Prepare url with results rx=ck.access({'action':'form_url_prefix', 'module_uoa':'wfe'}) if rx['return']>0: return rx url=rx['url']+'&action=index&module_uoa=wfe&native_action=show&native_module_uoa=program.optimization&scenario='+work['self_module_uid']+'&highlight_behavior_uid='+buid return {'return':0, 'status':'Results successfully added to Collective Knowledge (UID='+duid+')!', 'data_uid':duid, 'behavior_uid':buid, 'result_url':url} ############################################################################## # record unexpected behavior def process_unexpected_behavior(i): """ Input: { } Output: { return - return code = 0, if successful > 0, if error (error) - error text if return > 0 } """ import os duid=i.get('data_uid','') buid=i.get('behavior_uid','') cuid=i.get('crowd_uid','') rres=i.get('raw_results','') ca=i.get('correct_answer','') file_base64=i.get('file_base64','') # Find data r=ck.access({'action':'load', 'module_uoa':work['self_module_uid'], 'data_uoa':duid}) if r['return']>0: return r d=r['dict'] p=r['path'] # Find behavior found=False arr=d.get('all_raw_results',[]) for q in arr: if q.get('behavior_uid','')==buid: found=True break if not found: return {'return':1, 'error':'can\'t find behavior '+buid+' in entry '+duid} # Generate UID for the file with unexpected behavior rx=ck.gen_uid({}) if rx['return']>0: return rx 
ff='misprediction-image-'+rx['data_uid']+'.jpg' pf=os.path.join(p,ff) mp=q.get('mispredictions',[]) qq={} qq['misprediction_results']=rres qq['mispredicted_image']=ff qq['correct_answer']=ca mp.append(qq) q['mispredictions']=mp # Record file rx=ck.convert_upload_string_to_file({'file_content_base64':file_base64, 'filename':pf}) if rx['return']>0: return rx # Update entry (should add lock in the future for parallel processing) r=ck.access({'action':'update', 'module_uoa':work['self_module_uid'], 'data_uoa':duid, 'dict':d, 'sort_keys':'yes', 'substitute':'yes', 'ignore_update':'yes'}) if r['return']>0: return r return {'return':0} ############################################################################## # generate scenario to crowdsource benchmarking/optimization of DNN engines/models on Android devices def generate(i): """ Input: { (prune_target_os) - prune generated scenarios by this target OS (ABI) (prune_engine) - prune generated scenarios by this engine } Output: { return - return code = 0, if successful > 0, if error (error) - error text if return > 0 } """ import os import copy import shutil import itertools o=i.get('out','') p_tos=i.get('prune_target_os','') p_engine=i.get('prune_engine','') # Get platform params ck.out('Detecting host platform info ...') ck.out('') i['action']='detect' i['module_uoa']=cfg['module_deps']['platform'] i['out']='' rpp=ck.access(i) if rpp['return']>0: return rpp hos=rpp['host_os_uoa'] hosd=rpp['host_os_dict'] # Search all scenarios r=ck.access({'action':'search', 'module_uoa':cfg['module_deps']['experiment.scenario.mobile'], 'add_meta':'yes'}) if r['return']>0: return r lst=r['lst'] if len(lst)==0: return {'return':1, 'error':'no pre-recorded crowd-scenarios found ...'} uids={} found=False engine_meta={} engine_state='' # If engine changes, clean up found uids ... # Go through required engines (CPU,OpenCL) and ABIs, and compile classification with related libs ... 
for prog,b in list(itertools.product(cfg['prog_uoa'],abis)): abi=b['abi'] os_uoa=b['os_uoa'] if i.get('target_os','')!='': os_uoa=i['target_os'] engine=prog['engine'] prog_uoa=prog['program_uoa'] engine_key=prog['engine-deps-key'] engine_lib=prog.get('engine-lib','') # Check if should prune if p_tos!='' and p_tos!=os_uoa: continue if p_engine!='' and p_engine!=engine: continue if engine!=engine_state: uids={} found=False engine_state=engine ck.out(line) ck.out('Preparing "'+engine+'" engine with "'+abi+'" ABI for crowd-benchmarking and crowd-tuning ...') ck.out('') r=ck.inp({'text':'Press Enter to generate this scenario or N to skip this scenario: '}) s=r['string'] if s!='': continue # Compile classification (should compile all deps) ck.out('') ck.out('Compiling classification (and libs) ...') ii={'action':'compile', 'module_uoa':cfg['module_deps']['program'], 'data_uoa':prog_uoa, 'host_os':hos, 'target_os':os_uoa, 'speed':'yes', 'out':o} r=ck.access(ii) if r['return']>0: return r # Get various info cc=r.get('characteristics',{}) cs=cc.get('compilation_success','') if cs!='yes': return {'return':1, 'error':'compilation failed'} md5=cc.get('md5_sum','') bs=cc.get('binary_size',0) misc=r.get('misc',{}) pf=misc.get('path','') td=misc.get('tmp_dir','') of=misc.get('target_exe','') pv=misc.get('program_version','') pp=os.path.join(pf,td,of) if not os.path.isfile(pp): return {'return':1, 'error':'target binary not found ('+pp+')'} # Get info about deps deps=r.get('deps',{}) # Get info about lib if needed cdeps=deps.get(engine_key,{}) cfp='' lmd5='' ls=0 if engine_lib!='': # Force .so (sometimes points to .a) cfp=cdeps.get('cus',{}).get('full_path','') cfp=os.path.join(os.path.dirname(cfp),engine_lib) if os.path.isfile(cfp): r=ck.run_and_get_stdout({'cmd':hosd['md5sum']+' '+cfp, 'shell':'no'}) if r['return']>0: return r sto=r['stdout'].split(' ') if len(sto)==0: return {'return':1, 'error':'can\'t get MD5 of '+engine_lib} lmd5=sto[0] # MD5 of caffe lib 
ls=os.path.getsize(cfp) # xopenme odeps=deps.get('xopenme',{}) ofp=odeps.get('cus',{}).get('full_path','') # Force .so (sometimes points to .a) ofp=os.path.join(os.path.dirname(ofp),libxopenme) if not os.path.isfile(ofp): return {'return':1, 'error':'xopenme plugin not found ('+ofp+')'} r=ck.run_and_get_stdout({'cmd':hosd['md5sum']+' '+ofp, 'shell':'no'}) if r['return']>0: return r sto=r['stdout'].split(' ') if len(sto)==0: return {'return':1, 'error':'can\'t get MD5 of librtlxopenme.so'} omd5=sto[0] # MD5 of caffe lib ops=os.path.getsize(ofp) # Get versions of all deps dv={} for x in deps: dv[x]={} cx=deps[x].get('cus',{}) dv[x]['version']=cx.get('version','') dv[x]['revision']=cx.get('git_info',{}).get('revision','') dv[x]['iso_datetime_cut_revision']=cx.get('git_info',{}).get('iso_datetime_cut_revision','') dv[x]['tags']=deps[x].get('tags',[]) # Prepare info pack about this experiment meta={'classification_version':pv, 'deps':dv, 'host_os':hos, 'target_os':os_uoa} # Search caffe original code with a given ABI ck.out('') ck.out('Checking / updating scenario files ...') changed_files=[] # Which libs were substituted for q in lst: duid=q['data_uid'] duoa=q['data_uoa'] ruoa=q['repo_uid'] d=q['meta'] if d.get('outdated','')=='yes': # skip archived entries continue if d.get('engine','')!=engine: continue pe=q['path'] # Path to entry files=d.get('files',[]) # Process main binary files (in code directory) for ff in files: fduoa=ff.get('from_data_uoa','') if fduoa=='': sabi=ff.get('supported_abi',[]) if abi in sabi: fn=ff.get('filename','') p=ff.get('path','') xfs=ff.get('file_size',0) xmd5=ff.get('md5','') pep=os.path.join(pe,p,fn) if (fn==engine_lib and (not os.path.isfile(pep) or (xmd5!=lmd5 and xfs!=ls))) or \ (fn=='classification' and (not os.path.isfile(pep) or (xmd5!=md5 and xfs!=bs))) or \ (fn==libxopenme and (not os.path.isfile(pep) or (xmd5!=omd5 and xfs!=ops))): if not found: # If first time, tell that old scenario will be removed! 
ck.out('') ck.out('WARNING: we found OUTDATED binaries for this scenario') ck.out(' and plan to remove them - make sure that you archived them!') ck.out('') r=ck.inp({'text':'Would you like to proceed and remove outdated binaries (Y/n): '}) if r['return']>0: return r s=r['string'].strip().lower() if s!='': return {'return':0} found=True ck.out('') ck.out('Updating '+fn+' in '+duoa+' ('+p+')') oduid=p.split('/')[1] nduid=uids.get(oduid,'') # Generate new UID if nduid=='': r=ck.gen_uid({}) if r['return']>0: return r nduid=r['data_uid'] uids[oduid]=nduid ck.out(' * New code UID for '+oduid+' : '+nduid) else: ck.out(' * Reusing UID for '+oduid+' : '+nduid) np=os.path.join(pe,'code',nduid,abi) ck.out(' * New path: '+np) # Create new dir and copy file if not os.path.isdir(np): os.makedirs(np) fnp=os.path.join(np,fn) if fn==engine_lib: shutil.copy(cfp, fnp) elif fn=='classification': shutil.copy(pp, fnp) else: shutil.copy(ofp, fnp) # Remove old path px=os.path.join(pe,p) if os.path.isdir(px): ck.out(' * Removing dir: '+px) shutil.rmtree(px) # If whole directory is empty, remove it too px=os.path.join(pe,'code',oduid) if os.path.isdir(px) and len(os.listdir(px))==0: ck.out(' * Removing dir: '+px) shutil.rmtree(px) # Changing meta zp=np if not zp.startswith('code') and not zp.startswith('data'): j1=zp.find('code/') if j1<0: j1=zp.find('data/') if j1>0: zp=zp[j1:] changed_before={'filename':fn, 'from_data_uoa':duid, 'path':p, 'file_size':xfs, 'md5':xmd5} changed_after={'filename':fn, 'from_data_uoa':duid, 'path':zp} if fn==engine_lib: changed_after['file_size']=ls changed_after['md5']=lmd5 elif fn=='classification': changed_after['file_size']=bs changed_after['md5']=md5 else: changed_after['file_size']=ops changed_after['md5']=omd5 ff.update(changed_after) del(ff['from_data_uoa']) # Updating global meta of engine engine_meta[abi]={'program_version':pv, 'deps_versions':dv} d['engine_meta']=engine_meta # Add to changed changed_files.append({'before':changed_before, 
'after':changed_after}) else: ck.out('') ck.out('*** Scenario binary '+fn+' is up to date! ***') # If changed original files, change them in all other meta if len(changed_files)>0: ck.out('') ck.out('Updating all scenarios with new files ...') ck.out('') for q in lst: duid=q['data_uid'] duoa=q['data_uoa'] muid=q['module_uid'] ruoa=q['repo_uid'] d=q['meta'] if d.get('outdated','')=='yes': # skip archived entries continue if d.get('engine','')!=engine: continue ck.out(' * '+duoa) d['engine_meta']=engine_meta pp=q['path'] # Path to entry files=d.get('files',[]) for ff in files: fduoa=ff.get('from_data_uoa','') if fduoa!='': # Check if needs to be changed ... for c in changed_files: before=c['before'] after=c['after'] rx=ck.compare_dicts({'dict1':ff, 'dict2':before}) if rx['return']>0: return rx if rx['equal']=='yes': ff.update(after) # Update entry now (aggregating all above changes) r=ck.access({'action':'update', 'module_uoa':muid, 'data_uoa':duid, 'repo_uoa':ruoa, 'dict':d, 'substitute':'yes', 'ignore_update':'yes', 'sort_keys':'yes'}) if r['return']>0: return r return {'return':0}