Build Julia on NixOS

Does it work for you now?

shellHook is an attribute you can set when building with pkgs.stdenv.mkDerivation; it is sourced every time you enter the resulting environment with nix-shell.
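
For reference, a minimal sketch of that pattern (the package and the hook body are just illustrative):

with import <nixpkgs> {};

stdenv.mkDerivation {
  name = "shellhook-example";
  buildInputs = [ hello ];
  # Runs each time you enter the environment with nix-shell
  shellHook = ''
    export GREETING="hello from shellHook"
    echo $GREETING
  '';
}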

And sure, here is the whole mess of a Nix expression I use for my Julia environment.

with import <nixpkgs> {};

let
  
  unstable = import <nixos-unstable> {};
  IJulia = "/home/christoph/.julia/packages/IJulia/DrVMH";
  config = {
    display_name = "Julia Nix";
    argv = [
      "${julia}/bin/julia"
      "-i"
      "--startup-file=yes"
      "--color=yes"
      "--project=@."
      "${IJulia}/src/kernel.jl"
      "{connection_file}"
    ];
    language = "julia";
    interrupt_mode = "signal";
  };
  configFile = writeText "kernel.json" (builtins.toJSON config);
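  # builtins.toJSON serialises the attrset above, so the generated kernel.json
  # comes out roughly like this (store paths abbreviated):
  #   { "argv": [ "/nix/store/...-julia/bin/julia", "-i", "--startup-file=yes",
  #               "--color=yes", "--project=@.", ".../src/kernel.jl",
  #               "{connection_file}" ],
  #     "display_name": "Julia Nix",
  #     "interrupt_mode": "signal",
  #     "language": "julia" }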

  myPackages = pythonPackages: with pythonPackages; [
    # Install python dependencies
    asdf
    cython
    jupyter
    matplotlib
    numpy
    ortools
    pandas
    pip
    pyemd
    scikitlearn
    scipy
    setuptools

    # coc-vim dependencies
    black
    mypy
    pylama
    pylint
  ];
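  # python3.withPackages builds one Python environment in the store containing
  # the interpreter plus everything listed above; its bin/ directory is what
  # gets symlinked to ./env in the shellHook for coc-vim.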
  python-stuff = python3.withPackages myPackages;

  extraLibs = [
    cudatoolkit
    qt4
    glibc
    cairo
  ];


  libPath = lib.makeLibraryPath [
    qt4 
    gcc9 
    stdenv.cc.cc.lib
  ];
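  # lib.makeLibraryPath joins the lib/ output dirs of these packages with ':'
  # (roughly /nix/store/...-qt4/lib:/nix/store/...-gcc9/lib:...); it is used
  # as the rpath when patching the GKS binary in the shellHook below.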

in

# pkgs.mkShell {
pkgs.stdenv.mkDerivation {
  name = "sandbox-julia";
  buildInputs = with pkgs; [
    julia
    python-stuff
    # Apparently we need a curl install here or otherwise this happens:
    # https://github.com/JuliaPackaging/BinaryBuilder.jl/issues/527
    curl 
    zlib
    zlib.dev 
    zlib.out
    
    cmake
    llvm_8

    cudatoolkit
    git
    gitRepo
    gnumake
    gnupg
    gperf
    libGLU
    linuxPackages.nvidia_x11
    m4
    ncurses5
    procps
    unzip
    utillinux
    xorg.libX11
    xorg.libXext
    xorg.libXi
    xorg.libXmu
    xorg.libXrandr
    xorg.libXv

    neovim
  ];
  shellHook = ''
    WORKING_DIR=$PWD
    echo $WORKING_DIR
    # CUDA shell hooks
    export CUDA_PATH=${pkgs.cudatoolkit}
    export EXTRA_LDFLAGS="-L/lib -L${pkgs.linuxPackages.nvidia_x11}/lib"
    export EXTRA_CCFLAGS="-I/usr/include"

    # Stuff for julia jupyter kernel
    # julia -e 'using Pkg; Pkg.add("IJulia")'
    
    # Python stuff for coc-vim
    rm -f env
    ln -s ${python-stuff}/bin env

    rm -f env_julia
    ln -s ${julia} env_julia

    # Set PYTHONPATH so that PyCall in julia finds the relevant packages
    export PYTHONPATH=${python-stuff}/lib/python3.7/site-packages/ 

    # Setup a local pip build directory
    alias pip="PIP_PREFIX='$(pwd)/_build/pip_packages' \pip"
    export PYTHONPATH="$(pwd)/_build/pip_packages/lib/python3.7/site-packages:$PYTHONPATH"
    unset SOURCE_DATE_EPOCH
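    # nix-shell exports SOURCE_DATE_EPOCH, which can make pip's wheel builds
    # fail ("ZIP does not support timestamps before 1980"), hence the unset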

    FIRST_RUN=false
    if [ "$FIRST_RUN" = true ]; then 
      # Pip install python packages here
      pip install git+https://github.com/rflamary/POT
      pip install gputil

      export CUDA_TOOLKIT_ROOT_DIR=${cudatoolkit}
      export CUDA_ARCH=52
      rm -rf _build/pip_packages/lib/python3.7/site-packages/libKMCUDA
      mkdir -p _build/pip_packages/lib/python3.7/site-packages/libKMCUDA
      git clone --depth=1 https://github.com/src-d/kmcuda $(pwd)/_build/pip_packages/lib/python3.7/site-packages/libKMCUDA
      patch _build/pip_packages/lib/python3.7/site-packages/libKMCUDA/src/setup.py < libKMCUDA_CUDA_ARCH.patch
      pip uninstall -y libKMCUDA
      pip install _build/pip_packages/lib/python3.7/site-packages/libKMCUDA/src
    fi

    # cd $WORKING_DIR
    # Install jupyter extensions
    # Create required directory in case (optional)
    mkdir -p $(jupyter --data-dir)/nbextensions
    # Clone the repository (skip if it is already there)
    cd $(jupyter --data-dir)/nbextensions
    [ -d vim_binding ] || git clone --depth 1 https://github.com/lambdalisue/jupyter-vim-binding vim_binding
    # Activate the extension
    jupyter nbextension enable vim_binding/vim_binding
    # Go back to the working dir
    cd $WORKING_DIR
    
    # manually setup the kernel
    # TODO figure out how to use jupyter-kernel.create
    KERNEL_DIR=~/.local/share/jupyter/kernels/julia
    mkdir -p $KERNEL_DIR
    ln -sf ${configFile} $KERNEL_DIR/kernel.json
    ln -sf ${IJulia}/deps/logo-32x32.png $KERNEL_DIR/logo-32x32.png
    ln -sf ${IJulia}/deps/logo-64x64.png $KERNEL_DIR/logo-64x64.png

    # Julia Threads
    export JULIA_NUM_THREADS=12

    # The CMake binary provided by CMake.jl fails, so we have to build it from source
    # julia -e 'ENV["CMAKE_JL_BUILD_FROM_SOURCE"] = 1'
    export CMAKE_JL_BUILD_FROM_SOURCE=1

    # Make sure CUDAnative does not use BinaryBuilder
    export JULIA_CUDA_USE_BINARYBUILDER=false

    # julia -e 'using Pkg; Pkg.activate("./"); Pkg.add("GR")'
    # Patch the GKS binary for GR
    patchelf \
      --set-interpreter ${glibc}/lib/ld-linux-x86-64.so.2 \
      --set-rpath "${libPath}" \
      /home/christoph/.julia/packages/GR/cRdXQ/deps/gr/bin/gksqt

    # Configure PyCall to pick up the correct python binary
    # julia -e 'ENV["PYTHON"]="${python-stuff}/bin/python"; using Pkg; Pkg.activate("./"); Pkg.build("PyCall")'

    # Every time you get a new julia binary, you may need to nuke the julia package cache...
    # Maybe?
    # rm -rf /home/christoph/.julia/
  '';
}
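
To use it, save the expression as shell.nix in your project directory and enter the environment with:

nix-shell

Note that the shellHook runs on every entry, which is why the one-off pip/kmcuda setup above is gated behind FIRST_RUN (flip it to true for the first run, then back to false).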