11from collections import OrderedDict
2+ import hashlib
23import logging
34import os
45import re
@@ -816,7 +817,7 @@ def _create_task_cache_dir(self, task_id):
816817 pass
817818 return task_cache_dir
818819
819- def _perform_api_call (self , call , data = None , file_path = None , add_authentication = True ):
820+ def _perform_api_call (self , call , data = None , file_dictionary = None , add_authentication = True ):
820821 ############################################################################
821822 # Runs
822823 def get_runs_list (self , task_id = None , flow_id = None , setup_id = None ):
@@ -989,16 +990,16 @@ def _read_url(self, url, add_authentication=False, data=None, filePath=None):
989990 if not url .endswith ("/" ):
990991 url += "/"
991992 url += call
992- return self ._read_url (url , data = data , file_path = file_path )
993+ return self ._read_url (url , data = data , file_dictionary = file_dictionary )
993994
994- def _read_url (self , url , data = None , file_path = None ):
995+ def _read_url (self , url , data = None , file_dictionary = None ):
995996 if data is None :
996997 data = {}
997998 data ['api_key' ] = self .config .get ('FAKE_SECTION' , 'apikey' )
998999
999- if file_path is not None :
1000+ if file_dictionary is not None :
10001001 file_elements = {}
1001- for key , path in file_path .items ():
1002+ for key , path in file_dictionary .items ():
10021003 if os .path .isabs (path ) and os .path .exists (path ):
10031004 try :
10041005 if key is 'dataset' :
@@ -1065,7 +1066,7 @@ def upload_dataset(self, description, file_path=None):
10651066 try :
10661067 data = {'description' : description }
10671068 if file_path is not None :
1068- return_code , dataset_xml = self ._perform_api_call ("/data/" ,data = data , file_path = {'dataset' :file_path })
1069+ return_code , dataset_xml = self ._perform_api_call ("/data/" ,data = data , file_dictionary = {'dataset' : file_path })
10691070
10701071 except URLError as e :
10711072 # TODO logger.debug
@@ -1076,25 +1077,29 @@ def upload_dataset(self, description, file_path=None):
def upload_flow(self, description, file_path=None):
    """Upload a flow to the OpenML server.

    Parameters
    ----------
    description : str
        XML description of the flow, sent as the ``description`` form field.
    file_path : str, optional
        Path to the flow's source file. Only attached to the request when
        given; previously a ``None`` path was forwarded unconditionally and
        failed later in ``os.path.isabs(None)``.

    Returns
    -------
    (return_code, flow_xml) : tuple
        Whatever ``_perform_api_call`` returns for the ``/flow/`` endpoint.

    Raises
    ------
    URLError
        Re-raised (with original traceback) after being printed.
    """
    data = {'description': description}
    # Mirror upload_dataset: only build the file dictionary when a path
    # was actually supplied.
    file_dictionary = {'source': file_path} if file_path is not None else None
    try:
        return_code, flow_xml = self._perform_api_call(
            "/flow/", data=data, file_dictionary=file_dictionary)
    except URLError as e:
        # TODO logger.debug
        print(e)
        raise  # bare raise keeps the original traceback
    return return_code, flow_xml
10861087
def upload_run(self, files):
    """Upload a run to the OpenML server.

    Parameters
    ----------
    files : dict
        Mapping of form-field name to file path. Must contain a
        ``'predictions'`` entry; all entries are forwarded as the request's
        file dictionary.

    Returns
    -------
    (return_code, run_xml) : tuple
        Whatever ``_perform_api_call`` returns for the ``/run/`` endpoint.

    Raises
    ------
    ValueError
        If ``files`` has no ``'predictions'`` key.
    URLError
        Re-raised (with original traceback) after being printed.
    """
    # Guard clause: fail fast instead of nesting the success path.
    if 'predictions' not in files:
        raise ValueError("predictions file does not exist")

    # A shallow copy is all the old key-by-key loop achieved.
    file_dictionary = dict(files)

    try:
        return_code, run_xml = self._perform_api_call(
            "/run/", file_dictionary=file_dictionary)
    except URLError as e:
        # TODO logger.debug
        print(e)
        raise  # bare raise keeps the original traceback
    return return_code, run_xml
11001105
0 commit comments