From aa67840d45c9d4f03c84c6e899790431622891db Mon Sep 17 00:00:00 2001
From: Emery Hemingway
Date: Fri, 20 Oct 2023 19:55:29 +0100
Subject: [PATCH] Cannonicalize output

---
 .gitignore     |  1 +
 Tupfile        |  2 --
 Tuprules.tup   |  1 +
 lock.json      |  2 +-
 nim_lk.nimble  |  4 +--
 src/nim_lk.nim | 77 +++++++++++++++++++++++++++++---------------------
 6 files changed, 50 insertions(+), 37 deletions(-)
 create mode 100644 .gitignore
 delete mode 100644 Tupfile

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..8454dc7
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+/nim.cfg
diff --git a/Tupfile b/Tupfile
deleted file mode 100644
index 67a1a21..0000000
--- a/Tupfile
+++ /dev/null
@@ -1,2 +0,0 @@
-include_rules
-: |> !nim_lk |>
diff --git a/Tuprules.tup b/Tuprules.tup
index 89e7278..4d162e6 100644
--- a/Tuprules.tup
+++ b/Tuprules.tup
@@ -1 +1,2 @@
 NIM_FLAGS += --path:$(TUP_CWD)/../nim/v1.6.8
+NIM_FLAGS += --path:$(TUP_CWD)/../preserves-nim/src
diff --git a/lock.json b/lock.json
index 4cbc05e..ceef1fd 100644
--- a/lock.json
+++ b/lock.json
@@ -1 +1 @@
-{"depends":[]}
+{"depends":[{"method":"fetchzip","packages":["npeg"],"path":"/nix/store/ffkxmjmigfs7zhhiiqm0iw2c34smyciy-source","ref":"1.2.1","rev":"26d62fdc40feb84c6533956dc11d5ee9ea9b6c09","sha256":"0xpzifjkfp49w76qmaylan8q181bs45anmp46l4bwr3lkrr7bpwh","srcDir":"src","url":"https://github.com/zevv/npeg/archive/26d62fdc40feb84c6533956dc11d5ee9ea9b6c09.tar.gz"},{"method":"fetchzip","packages":["preserves"],"path":"/nix/store/nrxd0z8mxmdphw49c6p4n9lmmq0iq5pq-source","ref":"20231019","rev":"a2dc5becc0596d52ab205d869b7c167c0b562fb4","sha256":"09jygr7ynzh6vp2p54dgq2qz651d3lgvypkjwjp74zzp3jgwz7g5","srcDir":"src","url":"https://git.syndicate-lang.org/ehmry/preserves-nim/archive/a2dc5becc0596d52ab205d869b7c167c0b562fb4.tar.gz"}]}
diff --git a/nim_lk.nimble b/nim_lk.nimble
index 5fdd024..aa8ea94 100644
--- a/nim_lk.nimble
+++ b/nim_lk.nimble
@@ -3,6 +3,6 @@ bin = @["nim_lk"]
 description = "Tool for generating Nim lockfiles"
 license = "BSD-3-Clause"
 srcDir = "src"
-version = "20231009"
+version = "20231020"
 
-requires "nim >= 2.0.0"
+requires "nim >= 2.0.0", "preserves >= 20231020"
diff --git a/src/nim_lk.nim b/src/nim_lk.nim
index 2b7f65f..aee1b16 100644
--- a/src/nim_lk.nim
+++ b/src/nim_lk.nim
@@ -8,10 +8,13 @@ import
   nimblepkg/common,
   nimblepkg/version
 import std/[algorithm, deques, httpclient, json, os, osproc, parseutils, streams, strutils, uri]
+import preserves
 
 const
   githubPackagesUrl = "https://raw.githubusercontent.com/nim-lang/packages/master/packages.json"
 
+type Preserve = preserves.Preserve[void]
+
 proc registryCachePath: string =
   result = getEnv("XDG_CACHE_HOME")
   if result == "":
@@ -78,8 +81,17 @@ proc matchRev(url: string; wanted: VersionRange): tuple[tag: string, rev: string
   result = pairs[pairs.high]
   doAssert result.rev != "", url
 
-proc collectMetadata(data: JsonNode) =
-  let storePath = data["path"].getStr
+proc `[]`(dict: Preserve; key: string): Preserve =
+  dict[key.toPreserve]
+
+proc `[]=`(dict: var Preserve; key: string; val: Preserve) =
+  dict[key.toPreserve] = val
+
+proc `[]=`(dict: var Preserve; key: string; val: string) =
+  dict[key.toPreserve] = val.toPreserve
+
+proc collectMetadata(data: var Preserve) =
+  let storePath = data["path"].string
   var packageNames = newSeq[string]()
   for (kind, path) in walkDir(storePath):
     if kind in {pcFile, pcLinkToFile} and path.endsWith(".nimble"):
@@ -88,13 +100,13 @@ proc collectMetadata(data: var Preserve) =
   if packageNames.len == 0:
     quit("no .nimble files found in " & storePath)
   sort(packageNames)
-  data["packages"] = %packageNames
+  data["packages"] = packageNames.toPreserve(void)
   var
     nimbleFilePath = findNimbleFile(storePath, true)
     pkg = readPackageInfo(nimbleFilePath, parseCmdLine())
-  data["srcDir"] = %pkg.srcDir
+  data["srcDir"] = pkg.srcDir.toPreserve
 
-proc prefetchGit(uri: Uri; version: VersionRange): JsonNode =
+proc prefetchGit(uri: Uri; version: VersionRange): Preserve =
   var
     uri = uri
     subdir = ""
@@ -128,14 +140,14 @@ proc prefetchGit(uri: Uri; version: VersionRange): JsonNode =
     off.inc parseUntil(lines, hash, {'\n'}, off).succ
     off.inc parseUntil(lines, storePath, {'\n'}, off).succ
     doAssert off == lines.len, "unrecognized nix-prefetch-url output:\n" & lines
-    result = newJObject()
-    result["method"] = %"fetchzip"
-    result["path"] = %storePath
-    result["rev"] = %rev
-    result["sha256"] = %hash
-    result["url"] = %archiveUrl
+    result = initDictionary()
+    result["method"] = "fetchzip"
+    result["path"] = storePath
+    result["rev"] = rev
+    result["sha256"] = hash
+    result["url"] = archiveUrl
     if subdir != "":
-      result["subdir"] = %* subdir
+      result["subdir"] = subdir
   else:
     stderr.writeLine "fetch of ", archiveUrl, " returned ", resp.code
     var args = @["--quiet", "--fetch-submodules", "--url", cloneUrl, "--rev", rev]
@@ -144,26 +156,26 @@ proc prefetchGit(uri: Uri; version: VersionRange): JsonNode =
       "nix-prefetch-git",
       args = args,
       options = {poUsePath})
-    try: result = parseJson dump
-    except JsonParsingError:
+    try: result = parsePreserves dump
+    except CatchableError:
       stderr.writeLine "failed to parse output of nix-prefetch-git ", join(args, " ")
       quit(dump)
     if subdir != "":
-      result["subdir"] = %* subdir
-    result["method"] = %"git"
+      result["subdir"] = subdir
+    result["method"] = "git"
     if tag != "":
-      result["ref"] = %tag
+      result["ref"] = tag
   collectMetadata(result)
 
-proc containsPackageUri(lockAttrs: JsonNode; pkgUri: string): bool =
+proc containsPackageUri(lockAttrs: Preserve; pkgUri: string): bool =
   for e in lockAttrs.items:
-    if e["url"].getStr == pkgUri:
+    if e["url".toPreserve].string == pkgUri:
       return true
 
-proc containsPackage(lockAttrs: JsonNode; pkgName: string): bool =
+proc containsPackage(lockAttrs: Preserve; pkgName: string): bool =
   for e in lockAttrs.items:
     for other in e["packages"].items:
-      if pkgName == other.getStr:
+      if pkgName == other.string:
         return true
 
 proc collectRequires(pending: var Deque[PkgTuple]; pkgPath: string) =
@@ -205,16 +217,16 @@ proc getPackgeUri(name: string): tuple[uri: string, meth: string] =
       quit("Failed to parse shit JSON " & $e)
     inc i
 
-proc generateLockfile(): JsonNode =
-  result = newJObject()
+proc generateLockfile(): Preserve =
+  result = initDictionary()
   var
-    deps = newJArray()
+    deps = initSequence()
     pending: Deque[PkgTuple]
   collectRequires(pending, getCurrentDir())
   while pending.len > 0:
     let batchLen = pending.len
     for i in 1..batchLen:
-      var pkgData: JsonNode
+      var pkgData: Preserve
       let pkg = pending.popFirst()
       if pkg.name == "nim" or pkg.name == "compiler":
         continue
@@ -227,12 +239,12 @@ proc generateLockfile(): JsonNode =
         pkgData = prefetchGit(uri, pkg.ver)
       else:
         quit("unhandled URI " & $uri)
-      collectRequires(pending, pkgData["path"].getStr)
-      deps.add pkgData
+      collectRequires(pending, pkgData["path"].string)
+      deps.sequence.add pkgData
 
     if batchLen == pending.len:
       var
-        pkgData: JsonNode
+        pkgData: Preserve
         pkg = pending.popFirst()
         info = getPackgeUri(pkg.name)
         uri = parseUri info.uri
@@ -241,13 +253,14 @@ proc generateLockfile(): JsonNode =
         pkgData = prefetchGit(uri, pkg.ver)
       else:
         quit("unhandled fetch method " & $info.meth & " for " & info.uri)
-      collectRequires(pending, pkgData["path"].getStr)
-      deps.add pkgData
-  sort(deps.elems)
-  result["depends"] = deps
+      collectRequires(pending, pkgData["path"].string)
+      deps.sequence.add pkgData
+  sort(deps.sequence)
+  result["depends".toPreserve] = deps
 
 proc main =
   var lockInfo = generateLockfile()
+  cannonicalize(lockInfo)
   stdout.writeLine lockInfo
 
 main()