|
45 | 45 | # file must be in the directory ~/.openml/config and exist prior to |
46 | 46 | # importing the openml module. |
47 | 47 | # * Run the code below, replacing 'YOURKEY' with your API key. |
48 | | - |
| 48 | +# |
| 49 | +# .. warning:: This example uploads data. For that reason, this example |
| 50 | +# connects to the test server instead. This prevents the live server from |
| 51 | +# crowding with example datasets, tasks, studies, and so on. |
49 | 52 | ############################################################################ |
50 | 53 | import openml |
51 | 54 | from sklearn import neighbors |
52 | 55 |
|
53 | | -# Uncomment and set your OpenML key. Don't share your key with others. |
| 56 | +openml.config.start_using_configuration_for_example() |
| 57 | + |
| 58 | +############################################################################ |
| 59 | +# When using the main server instead, make sure your apikey is configured. |
| 60 | +# This can be done with the following line of code (uncomment it!). |
| 61 | +# Never share your apikey with others. |
| 62 | + |
54 | 63 | # openml.config.apikey = 'YOURKEY' |
55 | 64 |
|
56 | 65 | ############################################################################ |
|
80 | 89 | run = openml.runs.run_model_on_task(clf, task, avoid_duplicate_runs=False) |
81 | 90 | # Publish the experiment on OpenML (optional, requires an API key). |
82 | 91 | # For this tutorial, our configuration publishes to the test server |
83 | | -# as to not pollute the main server. |
| 92 | +# so as not to crowd the main server with runs created by examples. |
84 | 93 | myrun = run.publish() |
85 | 94 | print("kNN on %s: http://test.openml.org/r/%d" % (data.name, myrun.run_id)) |
| 95 | + |
| 96 | +############################################################################ |
| 97 | +openml.config.stop_using_configuration_for_example() |
0 commit comments