Commit ef993da9 authored by Roman Yurchak

Package scikit-learn

parent fd6e0bf4
@@ -24,6 +24,12 @@ jobs:
name: build
no_output_timeout: 1200
command: |
# download scipy package from https://github.com/iodide-project/pyodide/pull/211
mkdir -p build
wget -q -O build/scipy.js https://1463-122663163-gh.circle-artifacts.com/0/home/circleci/repo/build/scipy.js
wget -q -O build/scipy.data https://1463-122663163-gh.circle-artifacts.com/0/home/circleci/repo/build/scipy.data
ccache -z
make
ccache -s
......
package:
name: scikit-learn
version: 0.20.0
source:
url: https://pypi.io/packages/source/s/scikit-learn/scikit-learn-0.20.0.tar.gz
sha256: 97d1d971f8ec257011e64b7d655df68081dd3097322690afa1a71a1d755f8c18
patches:
#- patches/unvendor-joblib.patch
- patches/use-site-joblib.patch
build:
cflags: -Wno-implicit-function-declaration
requirements:
run:
- numpy # TODO: add scipy, joblib once the corresponding PRs are merged
test:
imports:
- sklearn
- sklearn.cluster
- sklearn.compose
- sklearn.covariance
- sklearn.cross_decomposition
- sklearn.datasets
- sklearn.decomposition
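The test: imports: list above presumably serves as a smoke test: once the built package is loaded in the Pyodide runtime, each listed module is imported and any ImportError fails the check. A minimal sketch of that idea, with an abbreviated module list and an importlib loop that is illustrative rather than Pyodide's actual test harness:

import importlib

# Hypothetical smoke test mirroring the `test: imports:` entries above.
for name in ["sklearn", "sklearn.cluster", "sklearn.compose", "sklearn.datasets"]:
    importlib.import_module(name)  # raises ImportError if the module cannot load
print("all listed modules imported successfully")
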
commit 16cf9dc5f79533a121a421b095b6e7ef9ee76e9c
Author: Roman Yurchak <rth.yurchak@pm.me>
Date: Thu Oct 25 16:56:54 2018 +0200
Use site joblib
diff --git a/sklearn/datasets/species_distributions.py b/sklearn/datasets/species_distributions.py
index 6d8acddcc..8191048d7 100644
--- a/sklearn/datasets/species_distributions.py
+++ b/sklearn/datasets/species_distributions.py
@@ -51,7 +51,7 @@ from .base import _fetch_remote
from .base import RemoteFileMetadata
from ..utils import Bunch
from sklearn.datasets.base import _pkl_filepath
-from sklearn.externals import joblib
+import joblib
PY3_OR_LATER = sys.version_info[0] >= 3
diff --git a/sklearn/ensemble/tests/test_forest.py b/sklearn/ensemble/tests/test_forest.py
index d7586c286..d1168cdcf 100644
--- a/sklearn/ensemble/tests/test_forest.py
+++ b/sklearn/ensemble/tests/test_forest.py
@@ -23,7 +23,11 @@ import pytest
from sklearn.utils import parallel_backend
from sklearn.utils import register_parallel_backend
-from sklearn.externals.joblib.parallel import LokyBackend
+try:
+ from sklearn.externals.joblib.parallel import LokyBackend
+except ImportError:
+ LokyBackend = object
+
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
diff --git a/sklearn/metrics/tests/test_score_objects.py b/sklearn/metrics/tests/test_score_objects.py
index da04b4215..fc3f6a6b1 100644
--- a/sklearn/metrics/tests/test_score_objects.py
+++ b/sklearn/metrics/tests/test_score_objects.py
@@ -40,7 +40,7 @@ from sklearn.datasets import load_diabetes
from sklearn.model_selection import train_test_split, cross_val_score
from sklearn.model_selection import GridSearchCV
from sklearn.multiclass import OneVsRestClassifier
-from sklearn.externals import joblib
+import joblib
REGRESSION_SCORERS = ['explained_variance', 'r2',
diff --git a/sklearn/neighbors/tests/test_kde.py b/sklearn/neighbors/tests/test_kde.py
index 990942c9e..e9a6c31bd 100644
--- a/sklearn/neighbors/tests/test_kde.py
+++ b/sklearn/neighbors/tests/test_kde.py
@@ -10,7 +10,7 @@ from sklearn.pipeline import make_pipeline
from sklearn.datasets import make_blobs
from sklearn.model_selection import GridSearchCV
from sklearn.preprocessing import StandardScaler
-from sklearn.externals import joblib
+import joblib
def compute_kernel_slow(Y, X, kernel, h):
diff --git a/sklearn/tests/test_site_joblib.py b/sklearn/tests/test_site_joblib.py
index bffd43cc1..df4c96893 100644
--- a/sklearn/tests/test_site_joblib.py
+++ b/sklearn/tests/test_site_joblib.py
@@ -1,7 +1,10 @@
import os
import pytest
from sklearn import externals
-from sklearn.externals import joblib as joblib_vendored
+try:
+ from sklearn.externals import joblib as joblib_vendored
+except ImportError:
+ joblib_vendored = None
from sklearn.utils import Parallel, delayed, Memory, parallel_backend
if os.environ.get('SKLEARN_SITE_JOBLIB', False):
diff --git a/sklearn/utils/testing.py b/sklearn/utils/testing.py
index 75b378961..b81b9ab58 100644
--- a/sklearn/utils/testing.py
+++ b/sklearn/utils/testing.py
@@ -44,7 +44,7 @@ except NameError:
import sklearn
from sklearn.base import BaseEstimator
-from sklearn.externals import joblib
+import joblib
from sklearn.utils.fixes import signature
from sklearn.utils import deprecated, IS_PYPY, _IS_32BIT
diff --git a/sklearn/utils/tests/test_estimator_checks.py b/sklearn/utils/tests/test_estimator_checks.py
index bf8412b3e..2eebb36b0 100644
--- a/sklearn/utils/tests/test_estimator_checks.py
+++ b/sklearn/utils/tests/test_estimator_checks.py
@@ -5,7 +5,7 @@ import numpy as np
import scipy.sparse as sp
from sklearn.externals.six.moves import cStringIO as StringIO
-from sklearn.externals import joblib
+import joblib
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.utils import deprecated
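The patch above consistently replaces imports of the vendored sklearn.externals.joblib with the site-installed joblib, and where the vendored module may be absent it falls back to a harmless placeholder (LokyBackend = object, joblib_vendored = None). A minimal sketch of the general import-fallback pattern, not part of the patch itself:

# Prefer the site-installed joblib; fall back to the vendored copy if it is still present.
try:
    import joblib
except ImportError:
    from sklearn.externals import joblib  # vendored copy, removed in later scikit-learn releases
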
from textwrap import dedent
def test_scikit_learn(selenium_standalone):
selenium = selenium_standalone
    # no automatic dependency resolution for now
selenium.load_package(["numpy", "joblib"])
selenium.load_package("scipy")
selenium.load_package("scikit-learn")
assert selenium.run("""
import numpy as np
import sklearn
from sklearn.linear_model import LogisticRegression
rng = np.random.RandomState(42)
X = rng.rand(100, 20)
y = rng.randint(5, size=100)
estimator = LogisticRegression(solver='liblinear')
estimator.fit(X, y)
print(estimator.predict(X))
estimator.score(X, y)
""") > 0
print(selenium.logs)
def test_import(selenium_standalone):
selenium = selenium_standalone
    # no automatic dependency resolution for now
selenium.load_package(["numpy", "joblib"])
selenium.load_package("scipy")
selenium.load_package("scikit-learn")
cmd = dedent("""
import sklearn
import sklearn.calibration
import sklearn.cluster
import sklearn.compose
import sklearn.covariance
import sklearn.cross_decomposition
import sklearn.datasets
import sklearn.decomposition
import sklearn.discriminant_analysis
import sklearn.dummy
import sklearn.ensemble
import sklearn.exceptions
import sklearn.externals
import sklearn.feature_extraction
import sklearn.feature_selection
import sklearn.gaussian_process
import sklearn.impute
import sklearn.isotonic
import sklearn.kernel_approximation
import sklearn.kernel_ridge
import sklearn.linear_model
import sklearn.manifold
import sklearn.metrics
import sklearn.mixture
import sklearn.model_selection
import sklearn.multiclass
import sklearn.multioutput
import sklearn.naive_bayes
import sklearn.neighbors
import sklearn.neural_network
import sklearn.pipeline
import sklearn.preprocessing
import sklearn.random_projection
import sklearn.semi_supervised
import sklearn.svm
import sklearn.tree
import sklearn.utils
""").splitlines()
    for line in cmd:
        try:
            selenium.run(line)
            print(f'{line} -- OK')
        except Exception:
            print(f'Error: {line} failed')
print(selenium.logs)
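The loop above imports each submodule one at a time and only logs failures, so a broken submodule is reported without failing the test. A sketch of an alternative that turns every import into its own test case via pytest parametrization (submodule list abbreviated; selenium_standalone fixture as used in the tests above):

import pytest

SKLEARN_SUBMODULES = ["calibration", "cluster", "compose"]  # abbreviated for the sketch

@pytest.mark.parametrize("submodule", SKLEARN_SUBMODULES)
def test_submodule_import(selenium_standalone, submodule):
    # Each submodule import becomes an individual pass/fail test case.
    selenium_standalone.load_package(["numpy", "joblib"])
    selenium_standalone.load_package("scipy")
    selenium_standalone.load_package("scikit-learn")
    selenium_standalone.run(f"import sklearn.{submodule}")
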
@@ -23,7 +23,7 @@ def registered_packages_meta():
for name in packages}
-UNSUPPORTED_PACKAGES = {'chrome': ['pandas', 'scipy'],
+UNSUPPORTED_PACKAGES = {'chrome': ['pandas', 'scipy', 'scikit-learn'],
'firefox': []}
......