diff --git a/README.md b/README.md index b45f707..7b71d4f 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,7 @@ pytorch-world ============= -The pytorch ecosystem in nix. - -Pytorch-1.x support is still being solidified. The current features are supported: +The pytorch ecosystem in nix. The following features are currently supported: | **Python-3.6.9 support** | pytorch-v1.1.0 | pytorch-v1.2.0 | | ----------------------------------- | ------------------ | ------------------ | @@ -24,18 +22,32 @@ The ":grey_exclamation:" means that namedtensors and binaries weren't attempted The "*" implies that FBGEMM is not included in any of the "Full" builds in pytorch-v1.1.0, but is included for all of pytorch-v1.2.0 +Releases +============================ + +This repository will tag releases according to the working version of pytorch. +There will also be a post-fixed snapshot number when new packages are working. +All releases will only be tested for python-3.6 unless otherwise noted. -Cachix (status: complicated) + +Using this in your projects ============================ -Binaries are to appear at pytorch-world.cachix.org. +Suggested use is to pull in `pytorch-world` as a git submodule under some `nix/` folder and refer to `probtorch/release.nix` for an example of how to depend on this library. +Do not reference `release.nix` files as they don't generate site-packages and only output final python binaries. -Currently, you will find pytorch36, pytorch36-mkl, and pytorch36-cu pushed to cachix. You can get them from this project root with the following: +Cachix +============= + +Binaries can be found at pytorch-world.cachix.org. + +Currently, you will find binaries for the `./release.nix` file which includes pytorch-1.2 related builds (read: no libtorch support). +This covers all CPU builds, which are darwin-accessible, as well as cuda-enabled pytorch built with `cudatoolkit_10_0`. 
+All of this was built from a NixOS machine with python-3.6 (pin, located under `./pin/`), feel free to file issues to increase support. +You can retrieve any of these with the following: ``` cachix use pytorch-world -nix-build ./pytorch/release.nix -A pytorch36 -A pytorch36-mkl -A pytorch36-cu +nix-build ./release.nix -A pytorch -A pytorch-mkl -A pytorch-cu ``` - - diff --git a/probtorch/default.nix b/probtorch/default.nix index c015576..a7c0381 100644 --- a/probtorch/default.nix +++ b/probtorch/default.nix @@ -7,7 +7,7 @@ buildPythonPackage rec { src = fetchFromGitHub { owner = "probtorch"; repo = "probtorch"; - rev = "4b7e1404354c05401fc4132c9cbe0a7e1ac52eb5"; + rev = "develop"; sha256= "1dgrvi0pjnxinaw7r5q9d3gyd40qn3dknm15yygbl105ybaym9i6"; }; diff --git a/probtorch/release.nix b/probtorch/release.nix index 9eab0e2..d3bdb04 100644 --- a/probtorch/release.nix +++ b/probtorch/release.nix @@ -1,25 +1,48 @@ -{ pkgs ? import ../pin/nixpkgs.nix {} }: +{ pkgs ? import ../pin/nixpkgs.nix {}, python ? pkgs.python36 }: let + my_cudatoolkit = pkgs.cudatoolkit_10_0; + my_cudnn = pkgs.cudnn_cudatoolkit_10_0; + my_nccl = pkgs.nccl_cudatoolkit_10; + mklSupport = true; + + my_magma = pkgs.callPackage ../deps/magma_250.nix { + inherit mklSupport; + cudatoolkit = my_cudatoolkit; + }; + mypackageOverrides = gpu: self: super: let - pytorch36-cpu = (self.callPackage ../pytorch/release.nix {}).pytorch36-vanilla; - pytorch36-cu = (self.callPackage ../pytorch/release.nix {}).pytorch36-cu; - pytorch = if gpu then pytorch36-cu else pytorch36-cpu; - probtorch = self.callPackage ./. 
{ inherit (pkgs.python36Packages) buildPythonPackage; inherit pytorch; }; + pytorchFull = self.callPackage ../pytorch { + inherit mklSupport; + openMPISupport = true; openmpi = pkgs.callPackage ../deps/openmpi.nix { }; + buildNamedTensor = true; + buildBinaries = true; + }; + + pytorchWithCuda10Full = self.callPackage ../pytorch { + inherit mklSupport; + openMPISupport = true; openmpi = pkgs.callPackage ../deps/openmpi.nix { cudaSupport = true; cudatoolkit = my_cudatoolkit; }; + cudaSupport = true; cudatoolkit = my_cudatoolkit; cudnn = my_cudnn; nccl = my_nccl; magma = my_magma; + buildNamedTensor = true; + buildBinaries = true; + }; + + pytorch = if gpu then pytorchWithCuda10Full else pytorchFull; + probtorch = self.callPackage ./. { inherit pytorch; }; in { inherit probtorch pytorch; }; - generic36 = { gpu }: + generic = { gpu }: let - mypython = pkgs.python36.override { + mypython = python.override { packageOverrides = mypackageOverrides gpu; self = mypython; }; in mypython.withPackages (ps: [ ps.pytorch ps.probtorch ]); in { - probtorch36-cpu = generic36 { gpu = false; }; - probtorch36-cu = generic36 { gpu = true; }; + probtorch = generic { gpu = false; }; + probtorchWithCuda = generic { gpu = true; }; } diff --git a/release.nix b/release.nix index 0b289ca..d170943 100644 --- a/release.nix +++ b/release.nix @@ -1,18 +1,18 @@ -{ pkgs ? import ./pin/nixpkgs.nix {} }: +{ pkgs ? import ./pin/nixpkgs.nix {}, python ? 
pkgs.python36 }: let - pytorch-releases = pkgs.callPackage ./pytorch/release.nix { }; - probtorch-releases = pkgs.callPackage ./probtorch/release.nix { }; - libtorch-releases = pkgs.callPackage ./libtorch/release.nix { }; + pytorch-releases = pkgs.callPackage ./pytorch/release.nix { inherit python; }; + probtorch-releases = pkgs.callPackage ./probtorch/release.nix { inherit python; }; in { - inherit (libtorch-releases) libtorch_cpu libtorch_cudatoolkit_10_0; + inherit (pytorch-releases) + # cpu builds + pytorch pytorch-mkl pytorch-openmpi pytorch-mkl-openmpi pytorchFull + # cuda dependencies + magma_250 magma_250mkl + # cuda builds + pytorch-cu pytorch-cu-mkl pytorch-cu-mkl-openmpi pytorchWithCuda10Full + ; - inherit (pytorch-releases) magma_250 pytorch36 pytorch36-mkl-openmpi pytorch36-cu-mkl - pytorch36-openmpi = generic { -pytorch36-mkl-openmpi - pytorch36-cu-mkl-openmpi-implicit = generic { - pytorch36-cu-mkl-openmpi-implicit-extras = generic { - - inherit (probtorch-releases) probtorch36-cpu probtorch36-cu; + inherit (probtorch-releases) probtorch probtorchWithCuda; }