From 2dab4188ec69be8cb41594516cc8a484b984bdc9 Mon Sep 17 00:00:00 2001
From: Fis Trivial
Date: Sat, 28 Apr 2018 03:47:03 +0000
Subject: gnu: Add python-autograd
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* gnu/packages/machine-learning.scm (python-autograd, python2-autograd):
New variables.

Signed-off-by: Ludovic Courtès
---
 gnu/packages/machine-learning.scm | 44 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 44 insertions(+)

(limited to 'gnu/packages/machine-learning.scm')

diff --git a/gnu/packages/machine-learning.scm b/gnu/packages/machine-learning.scm
index 12384a1031..f0d35484ea 100644
--- a/gnu/packages/machine-learning.scm
+++ b/gnu/packages/machine-learning.scm
@@ -6,6 +6,7 @@
 ;;; Copyright © 2018 Tobias Geerinckx-Rice
 ;;; Copyright © 2018 Mark Meyer
 ;;; Copyright © 2018 Ben Woodcroft
+;;; Copyright © 2018 Fis Trivial
 ;;;
 ;;; This file is part of GNU Guix.
 ;;;
@@ -688,3 +689,46 @@ mining and data analysis.")
 
 (define-public python2-scikit-learn
   (package-with-python2 python-scikit-learn))
+
+(define-public python-autograd
+  (let* ((commit "442205dfefe407beffb33550846434baa90c4de7")
+         (revision "0")
+         (version (git-version "0.0.0" revision commit)))
+    (package
+      (name "python-autograd")
+      (home-page "https://github.com/HIPS/autograd")
+      (source (origin
+                (method git-fetch)
+                (uri (git-reference
+                      (url home-page)
+                      (commit commit)))
+                (sha256
+                 (base32
+                  "189sv2xb0mwnjawa9z7mrgdglc1miaq93pnck26r28fi1jdwg0z4"))
+                (file-name (git-file-name name version))))
+      (version version)
+      (build-system python-build-system)
+      (native-inputs
+       `(("python-nose" ,python-nose)
+         ("python-pytest" ,python-pytest)))
+      (propagated-inputs
+       `(("python-future" ,python-future)
+         ("python-numpy" ,python-numpy)))
+      (arguments
+       `(#:phases (modify-phases %standard-phases
+                    (replace 'check
+                      (lambda _
+                        (invoke "py.test" "-v"))))))
+      (synopsis "Efficiently computes derivatives of NumPy code")
+      (description "Autograd can automatically differentiate native Python and
+NumPy code.  It can handle a large subset of Python's features, including loops,
+ifs, recursion and closures, and it can even take derivatives of derivatives
+of derivatives.  It supports reverse-mode differentiation
+(a.k.a. backpropagation), which means it can efficiently take gradients of
+scalar-valued functions with respect to array-valued arguments, as well as
+forward-mode differentiation, and the two can be composed arbitrarily.  The
+main intended application of Autograd is gradient-based optimization.")
+      (license license:expat))))
+
+(define-public python2-autograd
+  (package-with-python2 python-autograd))
-- 
cgit v1.2.3
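
As a quick sanity check of the packaged library, here is a minimal sketch
adapted from the tanh example in autograd's upstream README
(https://github.com/HIPS/autograd): it differentiates a plain NumPy
function with autograd's grad, using autograd.numpy, the library's
drop-in NumPy wrapper.

    import autograd.numpy as np  # autograd's thinly wrapped NumPy
    from autograd import grad    # turns f(x) into a function computing df/dx

    def tanh(x):
        # Ordinary Python/NumPy code; no special annotations required.
        y = np.exp(-2.0 * x)
        return (1.0 - y) / (1.0 + y)

    grad_tanh = grad(tanh)       # reverse-mode derivative of a scalar function
    print(grad_tanh(1.0))        # prints roughly 0.4199743

Once the patch is applied, `guix build python-autograd' builds the package,
running the test suite through the 'check phase replaced above.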