Add "Try Dhall" project (#739)

This adds three new Nix build products:

* `try-dhall-static` - The static assets for "Try Dhall"
* `try-dhall-server` - A script which serves the static assets for local
   debugging
* `tarball-try-dhall` - A tarball of the static assets

This is powered by a small new `dhall-try` package, which is also included in
this change.
Authored by Gabriel Gonzalez on 2018-12-06 18:00:03 -08:00, committed by GitHub
parent bce7230c3e
commit cf69f5a953
17 changed files with 908 additions and 21 deletions


@@ -85,6 +85,39 @@ And each of them with `stack build <package-name>`, for example:
$ stack build dhall-json
```
## Build and serve the "Try Dhall" website
This is currently only supported for Nix on Linux.
You can build the static assets by running:
```bash
$ nix-build --attr try-dhall-static
```
For local testing you can open the generated `./result/index.html` directly in
your browser and everything will work except for relative imports of the
Prelude.
To also test imports you can build the `try-dhall-server` script which uses
`warp` to serve the static assets:
```bash
$ nix-build --attr try-dhall-server
$ result/bin/try-dhall-server
Serving directory /nix/store/i7x86qs888rndrhvw92y69jd5xaji060-try-dhall-static on port 3000 with ["index.html","index.htm"] index files.
```
... and then open `http://localhost:3000` in your browser. You will then be
able to import things from the Prelude within your Dhall configuration using
a path like `http://localhost:3000/Prelude/List/map`. Similarly, if you host
this on some public-facing domain such as `https://try.dhall-lang.org` then you
can import the Prelude using `https://try.dhall-lang.org/Prelude/List/map`.
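For example, with the server running on port 3000 you should be able to
evaluate a configuration like the following (a minimal sketch; the `map`
binding name is arbitrary):
```dhall
let map = http://localhost:3000/Prelude/List/map

in  map Natural Natural (λ(n : Natural) → n + 1) [ 2, 3, 5 ]
```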
Note that imports from other domains (such as `prelude.dhall-lang.org`) will not
yet work since cross-origin resource sharing (CORS) is not enabled. This is why
the "Try Dhall" server serves its own copy of the Prelude.
## Contributing
Read the following guide if you would like to contribute:


@@ -1,5 +1,10 @@
let
shared = import ./nix/shared.nix {};
shared_ghcjs = import ./nix/shared.nix { compiler = "ghcjs"; };
in
{ inherit (shared) dhall dhall-bash dhall-json dhall-text; }
{ inherit (shared) dhall dhall-bash dhall-json dhall-text;
inherit (shared_ghcjs) dhall-try try-dhall-server try-dhall-static;
}

dhall-try/LICENSE (new file, 30 lines)

@@ -0,0 +1,30 @@
Copyright (c) 2018, Gabriel Gonzalez
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Gabriel Gonzalez nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

dhall-try/Setup.hs (new file, 2 lines)

@@ -0,0 +1,2 @@
import Distribution.Simple
main = defaultMain

dhall-try/default.nix (new file, 1 line)

@@ -0,0 +1 @@
(import ../nix/shared.nix { compiler = "ghcjs"; }).dhall-try

dhall-try/dhall-try.cabal (new file, 25 lines)

@@ -0,0 +1,25 @@
name: dhall-try
version: 1.0.0
synopsis: Try Dhall in a browser
-- description:
homepage: https://github.com/dhall-lang/dhall-haskell
license: BSD3
license-file: LICENSE
author: Gabriel Gonzalez
maintainer: Gabriel439@gmail.com
copyright: 2018 Gabriel Gonzalez
category: Web
build-type: Simple
cabal-version: >=1.10
executable dhall-try
main-is: Main.hs
build-depends: base >= 4.11.0.0 && < 5
, dhall >= 1.19.0 && < 1.20
, prettyprinter >= 1.2.1 && < 1.3
, text >= 1.2.3.0 && < 1.3
, ghcjs-base >= 0.2.0.0 && < 0.3
hs-source-dirs: src
default-language: Haskell2010
ghc-options: -Wall
cpp-options: -DGHCJS_BROWSER

dhall-try/index.html (new file, 54 lines)

@@ -0,0 +1,54 @@
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<style>
* {
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box;
}
#title {
text-align: center;
}
.CodeMirror {
margin-left: auto;
margin-right: auto;
margin-top: 20px;
margin-bottom: 20px;
outline: 1px solid black;
width: 86ch;
height: 24em;
}
</style>
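<!-- codemirror.js, haskell.js, and codemirror.css come from the CodeMirror
     package; rts.js, lib.js, out.js, and runmain.js are the GHCJS build
     outputs of the dhall-try executable -->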
<link rel="stylesheet" href="./css/codemirror.css">
<script language="javascript" src="./js/codemirror.js"></script>
<script language="javascript" src="./js/haskell.js"></script>
<script language="javascript" src="./js/rts.js"></script>
<script language="javascript" src="./js/lib.js"></script>
<script language="javascript" src="./js/out.js"></script>
</head>
<body>
<a href="https://dhall-lang.org"><img src="./img/dhall-logo.png" height="50px"></a>
<h1 id="title">Try the Dhall configuration language</h1>
<textarea id="dhall-input">Natural/even (1 + 1)</textarea>
<textarea id="dhall-output"></textarea>
</body>
<script>
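// Replace the two textareas with CodeMirror editors; the global `input` and
// `output` bindings below are what dhall-try's JavaScript FFI imports refer to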
var dhallInput = document.getElementById("dhall-input");
var dhallOutput = document.getElementById("dhall-output");
var input = CodeMirror.fromTextArea(dhallInput, {
lineNumbers: true,
mode: "haskell"
});
var output = CodeMirror.fromTextArea(dhallOutput, {
lineNumbers: true,
mode: "haskell"
});
</script>
<script language="javascript" src="./js/runmain.js" defer></script>
</html>

dhall-try/shell.nix (new file, 1 line)

@@ -0,0 +1 @@
(import ../nix/shared.nix { compiler = "ghcjs"; }).shell-dhall-try

dhall-try/src/Main.hs (new file, 74 lines)

@@ -0,0 +1,74 @@
{-# LANGUAGE OverloadedStrings #-}
module Main where
import qualified Control.Exception
import qualified Data.JSString
import qualified Data.Text
import qualified Data.Text.Prettyprint.Doc as Pretty
import qualified Data.Text.Prettyprint.Doc.Render.Text as Pretty
import qualified Dhall.Core
import qualified Dhall.Import
import qualified Dhall.Parser
import qualified Dhall.Pretty
import qualified Dhall.TypeCheck
import qualified GHCJS.Foreign.Callback
import Control.Exception (SomeException)
import Data.JSString (JSString)
import Data.Text (Text)
import Dhall.Core (Expr(..))
import GHCJS.Foreign.Callback (Callback)
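-- JavaScript FFI bindings to the two CodeMirror editors (`input` and `output`) created in index.html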
foreign import javascript unsafe "input.getValue()" getInput :: IO JSString
foreign import javascript unsafe "input.on('change', $1)" registerCallback :: Callback (IO ()) -> IO ()
foreign import javascript unsafe "output.setValue($1)" setOutput :: JSString -> IO ()
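-- Strip the ANSI escape codes that colour the word "Error", since terminal colours do not render in the output editor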
fixup :: Text -> Text
fixup = Data.Text.replace "\ESC[1;31mError\ESC[0m" "Error"
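-- Evaluate the input once at start-up and again on every change: parse, resolve imports, type-check, normalize, and render the result annotated with its inferred type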
main :: IO ()
main = do
let prettyExpression =
Pretty.renderStrict
. Pretty.layoutSmart Dhall.Pretty.layoutOpts
. Dhall.Pretty.prettyExpr
let callback :: IO ()
callback = do
inputJSString <- getInput
let inputString = Data.JSString.unpack inputJSString
let inputText = Data.Text.pack inputString
outputText <- case Dhall.Parser.exprFromText "(input)" inputText of
Left exception -> do
return (Data.Text.pack (show exception))
Right parsedExpression -> do
eitherResolvedExpression <- Control.Exception.try (Dhall.Import.load parsedExpression)
case eitherResolvedExpression of
Left exception -> do
return (Data.Text.pack (show (exception :: SomeException)))
Right resolvedExpression -> do
case Dhall.TypeCheck.typeOf resolvedExpression of
Left exception -> do
return (Data.Text.pack (show exception))
Right inferredType -> do
let normalizedExpression =
Dhall.Core.normalize resolvedExpression
return (prettyExpression (Annot normalizedExpression inferredType))
let outputString = Data.Text.unpack (fixup outputText)
let outputJSString = Data.JSString.pack outputString
setOutput outputJSString
callback
async <- GHCJS.Foreign.Callback.asyncCallback callback
registerCallback async
return ()


@@ -338,6 +338,8 @@ Library
Build-Depends: semigroups == 0.18.*
Build-Depends: transformers == 0.4.2.*
Build-Depends: fail == 4.9.*
if impl(ghcjs)
Build-Depends: ghcjs-xhr
Exposed-Modules:
Dhall,


@@ -1609,8 +1609,8 @@ normalizeWithM ctx e0 = loop (denote e0)
t' <- loop t
if boundedType t' then strict else lazy
where
strict = strictLoop n0
lazy = loop ( lazyLoop n0)
strict = strictLoop (fromIntegral n0 :: Integer)
lazy = loop ( lazyLoop (fromIntegral n0 :: Integer))
strictLoop !0 = loop zero
strictLoop !n = App succ' <$> strictLoop (n - 1) >>= loop


@@ -13,14 +13,19 @@ import Data.Dynamic (fromDynamic, toDyn)
import Data.Semigroup ((<>))
import Lens.Family.State.Strict (zoom)
import qualified Control.Exception
import qualified Control.Monad.Trans.State.Strict as State
import qualified Data.Text as Text
import qualified Data.Text.Lazy
import qualified Data.Text.Lazy.Encoding
import Dhall.Import.Types
#ifdef __GHCJS__
import qualified JavaScript.XHR
#else
import qualified Control.Exception
import qualified Data.Text.Lazy
import qualified Data.Text.Lazy.Encoding
#endif
#if MIN_VERSION_http_client(0,5,0)
import Network.HTTP.Client
(HttpException(..), HttpExceptionContent(..), Manager)
@@ -111,6 +116,18 @@ fetchFromHttpUrl
:: String
-> Maybe [(CI ByteString, ByteString)]
-> StateT (Status m) IO (String, Text.Text)
#ifdef __GHCJS__
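-- When built with GHCJS, fetch remote imports with the browser's XMLHttpRequest (via ghcjs-xhr); custom request headers are not supported on this code path yet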
fetchFromHttpUrl url Nothing = do
(statusCode, body) <- liftIO (JavaScript.XHR.get (Text.pack url))
case statusCode of
200 -> return ()
_ -> fail (url <> " returned a non-200 status code: " <> show statusCode)
return (url, body)
fetchFromHttpUrl _ _ = do
fail "Dhall does not yet support custom headers when built using GHCJS"
#else
fetchFromHttpUrl url mheaders = do
m <- needManager
@@ -134,3 +151,4 @@ fetchFromHttpUrl url mheaders = do
case Data.Text.Lazy.Encoding.decodeUtf8' bytes of
Left err -> liftIO (Control.Exception.throwIO err)
Right text -> return (url, Data.Text.Lazy.toStrict text)
#endif

nix/npm/default.nix (new file, 17 lines)

@@ -0,0 +1,17 @@
# This file has been generated by node2nix 1.6.0. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-6_x"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv python2 utillinux runCommand writeTextFile;
inherit nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl fetchgit;
inherit nodeEnv;
}

nix/npm/node-env.nix (new file, 542 lines)

@@ -0,0 +1,542 @@
# This file originates from node2nix
{stdenv, nodejs, python2, utillinux, libtool, runCommand, writeTextFile}:
let
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
includeDependencies = {dependencies}:
stdenv.lib.optionalString (dependencies != [])
(stdenv.lib.concatMapStrings (dependency:
''
# Bundle the dependencies of the package
mkdir -p node_modules
cd node_modules
# Only include dependencies if they don't exist. They may also be bundled in the package.
if [ ! -e "${dependency.name}" ]
then
${composePackage dependency}
fi
cd ..
''
) dependencies);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
''
DIR=$(pwd)
cd $TMPDIR
unpackFile ${src}
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/${packageName}")"
if [ -f "${src}" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -print0 | xargs -0 chmod u+x
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/${packageName}"
elif [ -d "${src}" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash ${src})"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/${packageName}"
fi
# Unset the stripped name to not confuse the next unpack step
unset strippedName
# Include the dependencies of the package
cd "$DIR/${packageName}"
${includeDependencies { inherit dependencies; }}
cd ..
${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
replaceDependencies(packageObj.optionalDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${stdenv.lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${stdenv.lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(packageLock.lockfileVersion !== 1) {
process.stderr.write("Sorry, I only understand lock file version 1!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 1,
requires: true,
dependencies: {}
};
function augmentPackageJSON(filePath, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, dependencies);
}
});
} else {
augmentPackageJSON(filePath, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-${name}-${version}";
buildInputs = [ tarWrapper python nodejs ]
++ stdenv.lib.optional (stdenv.isLinux) utillinux
++ stdenv.lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${stdenv.lib.optionalString bypassCache ''
if [ ! -f package-lock.json ]
then
echo "No package-lock.json file found, reconstructing..."
node ${reconstructPackageLock}
fi
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild
if [ "$dontNpmInstall" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
fi
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
nodeDependencies = stdenv.mkDerivation ({
name = "node-dependencies-${name}-${version}";
buildInputs = [ tarWrapper python nodejs ]
++ stdenv.lib.optional (stdenv.isLinux) utillinux
++ stdenv.lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${stdenv.lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
fi
''}
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
cd ..
${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
source $pinpointDependenciesScriptPath
cd ${packageName}
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
export HOME=$PWD
${stdenv.lib.optionalString bypassCache ''
if [ ! -f package-lock.json ]
then
echo "No package-lock.json file found, reconstructing..."
node ${reconstructPackageLock}
fi
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild
${stdenv.lib.optionalString (!dontNpmInstall) ''
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install
''}
cd ..
${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
in
stdenv.mkDerivation {
name = "node-shell-${name}-${version}";
buildInputs = [ python nodejs ] ++ stdenv.lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = stdenv.lib.optionalString (dependencies != []) ''
export NODE_PATH=$nodeDependencies/lib/node_modules
'';
};
in
{
buildNodeSourceDist = stdenv.lib.makeOverridable buildNodeSourceDist;
buildNodePackage = stdenv.lib.makeOverridable buildNodePackage;
buildNodeShell = stdenv.lib.makeOverridable buildNodeShell;
}

nix/npm/node-packages.nix (new file, 26 lines)

@@ -0,0 +1,26 @@
# This file has been generated by node2nix 1.6.0. Do not edit!
{nodeEnv, fetchurl, fetchgit, globalBuildInputs ? []}:
let
sources = {};
in
{
codemirror = nodeEnv.buildNodePackage {
name = "codemirror";
packageName = "codemirror";
version = "5.42.0";
src = fetchurl {
url = "https://registry.npmjs.org/codemirror/-/codemirror-5.42.0.tgz";
sha512 = "pbApC8zDzItP3HRphD6kQVwS976qB5Qi0hU3MZMixLk+AyugOW1RF+8XJEjeyl5yWsHNe88tDUxzeRh5AOxPRw==";
};
buildInputs = globalBuildInputs;
meta = {
description = "Full-featured in-browser code editor";
homepage = https://codemirror.net/;
license = "MIT";
};
production = true;
bypassCache = false;
};
}


@@ -15,6 +15,13 @@ let
builtins.listToAttrs (map toNameValue names);
overlayShared = pkgsNew: pkgsOld: {
dhall-logo =
pkgsNew.fetchurl {
url = "https://raw.githubusercontent.com/dhall-lang/dhall-lang/8bab26f9515cc1007025e0ab4b4e7dd6e95a7103/img/dhall-logo.png";
sha256 = "0j6sfvm4kxqb2m6s1sv9qag7m30cibaxpphprhaibp9s9shpra4p";
};
dhall-sdist =
let
predicate = path: type:
@@ -66,9 +73,11 @@ let
dontCheckExtension =
mass pkgsNew.haskell.lib.dontCheck [
"aeson"
"base-compat-batteries"
"comonad"
"distributive"
"doctest"
"Glob"
"half"
"http-types"
"megaparsec"
@@ -110,10 +119,16 @@ let
{ };
dhall-text =
haskellPackagesNew.callCabal2nix
haskellPackagesNew.callCabal2nix
"dhall-text"
../dhall-text
{ };
dhall-try =
haskellPackagesNew.callCabal2nix
"dhall-try"
../dhall-try
{ };
};
in
@@ -129,6 +144,41 @@ let
);
};
};
npm = pkgsNew.callPackage ./npm { };
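# Assemble the static site: index.html, the CodeMirror assets, the Dhall logo, a copy of the Prelude, and the GHCJS output of dhall-try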
try-dhall-static = pkgsNew.runCommand "try-dhall-static" {} ''
${pkgsNew.coreutils}/bin/mkdir $out
${pkgsNew.coreutils}/bin/mkdir $out/{css,img,js}
${pkgsNew.coreutils}/bin/cp ${../dhall-try/index.html} $out/index.html
${pkgsNew.coreutils}/bin/ln --symbolic ${pkgsNew.npm.codemirror}/lib/node_modules/codemirror/lib/codemirror.js $out/js
${pkgsNew.coreutils}/bin/ln --symbolic ${pkgsNew.npm.codemirror}/lib/node_modules/codemirror/mode/haskell/haskell.js $out/js
${pkgsNew.coreutils}/bin/ln --symbolic ${pkgsNew.npm.codemirror}/lib/node_modules/codemirror/lib/codemirror.css $out/css
${pkgsNew.coreutils}/bin/ln --symbolic ${pkgsNew.dhall-logo} $out/img/dhall-logo.png
${pkgsNew.coreutils}/bin/ln --symbolic ${pkgsNew.dhall.prelude} $out/Prelude
${pkgsNew.coreutils}/bin/ln --symbolic ${pkgsNew.haskell.packages.ghcjs.dhall-try}/bin/dhall-try.jsexe/{lib,out,rts,runmain}.js $out/js/
${pkgsNew.coreutils}/bin/mkdir $out/nix-support
${pkgsNew.coreutils}/bin/echo "doc none $out/index.html" > $out/nix-support/hydra-build-products
'';
tarball-try-dhall = pkgsStaticLinux.releaseTools.binaryTarball rec {
src = pkgsNew.try-dhall-static;
installPhase = ''
releaseName=try-dhall
${pkgsNew.coreutils}/bin/install --target-directory "$TMPDIR/inst/try-dhall/" -D $src/index.html
${pkgsNew.coreutils}/bin/install --target-directory "$TMPDIR/inst/try-dhall/img" -D $src/img/*
${pkgsNew.coreutils}/bin/install --target-directory "$TMPDIR/inst/try-dhall/css" -D $src/css/*
${pkgsNew.coreutils}/bin/install --target-directory "$TMPDIR/inst/try-dhall/js" -D $src/js/*
'';
};
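# Serve the static assets locally using the `warp` executable from `wai-app-static` (which listens on port 3000 by default)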
try-dhall-server = pkgsNew.writeScriptBin "try-dhall-server" ''
${pkgsNew.haskellPackages.wai-app-static}/bin/warp --docroot ${pkgsNew.try-dhall-static}
'';
};
overlayCabal2nix = pkgsNew: pkgsOld: {
@@ -370,29 +420,35 @@ let
'';
};
toShell = drv:
if compiler == "ghcjs"
then
# `doctest` doesn't work with `ghcjs`
(pkgs.haskell.lib.dontCheck drv).env
else
# Benchmark dependencies aren't added by default
(pkgs.haskell.lib.doBenchmark drv).env;
in
rec {
inherit pwd;
tarball-dhall = makeTarball "dhall";
tarball-dhall = makeTarball "dhall" ;
tarball-dhall-bash = makeTarball "dhall-bash";
tarball-dhall-json = makeTarball "dhall-json";
tarball-dhall-text = makeTarball "dhall-text";
inherit (pkgs.haskell.packages."${compiler}") dhall dhall-bash dhall-json dhall-text;
inherit (pkgs) tarball-try-dhall try-dhall-server try-dhall-static;
inherit (pkgs.haskell.packages."${compiler}") dhall dhall-bash dhall-json dhall-text dhall-try;
inherit (pkgs.releaseTools) aggregate;
shell-dhall = (pkgs.haskell.lib.doBenchmark pkgs.haskell.packages."${compiler}".dhall).env;
shell-dhall-bash = (pkgs.haskell.lib.doBenchmark pkgs.haskell.packages."${compiler}".dhall-bash).env;
shell-dhall-json = (pkgs.haskell.lib.doBenchmark pkgs.haskell.packages."${compiler}".dhall-json).env;
shell-dhall-text = (pkgs.haskell.lib.doBenchmark pkgs.haskell.packages."${compiler}".dhall-text).env;
shell-dhall = toShell pkgs.haskell.packages."${compiler}".dhall ;
shell-dhall-bash = toShell pkgs.haskell.packages."${compiler}".dhall-bash;
shell-dhall-json = toShell pkgs.haskell.packages."${compiler}".dhall-json;
shell-dhall-text = toShell pkgs.haskell.packages."${compiler}".dhall-text;
shell-dhall-try = toShell pkgs.haskell.packages."${compiler}".dhall-try ;
test-dhall =
pkgs.mkShell


@@ -41,8 +41,7 @@ in
shared.tarball-dhall-json
shared.tarball-dhall-text
# Verify that `dhall` can be built using GHCJS
shared_ghcjs.dhall
shared_ghcjs.tarball-try-dhall
# This is the only `dhall` build that runs the test suite
coverage.dhall
@@ -53,6 +52,8 @@ in
"coverage-dhall" = coverage.dhall;
inherit (shared_ghcjs) tarball-try-dhall;
inherit (shared)
tarball-dhall
tarball-dhall-bash