Cannonicalize output

This commit is contained in:
Ehmry - 2023-10-20 19:55:29 +01:00
parent 4df3728a62
commit aa67840d45
6 changed files with 50 additions and 37 deletions

1
.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/nim.cfg

View File

@@ -1,2 +0,0 @@
include_rules
: |> !nim_lk |>

View File

@@ -1 +1,2 @@
NIM_FLAGS += --path:$(TUP_CWD)/../nim/v1.6.8 NIM_FLAGS += --path:$(TUP_CWD)/../nim/v1.6.8
NIM_FLAGS += --path:$(TUP_CWD)/../preserves-nim/src

View File

@@ -1 +1 @@
{"depends":[]} {"depends":[{"method":"fetchzip","packages":["npeg"],"path":"/nix/store/ffkxmjmigfs7zhhiiqm0iw2c34smyciy-source","ref":"1.2.1","rev":"26d62fdc40feb84c6533956dc11d5ee9ea9b6c09","sha256":"0xpzifjkfp49w76qmaylan8q181bs45anmp46l4bwr3lkrr7bpwh","srcDir":"src","url":"https://github.com/zevv/npeg/archive/26d62fdc40feb84c6533956dc11d5ee9ea9b6c09.tar.gz"},{"method":"fetchzip","packages":["preserves"],"path":"/nix/store/nrxd0z8mxmdphw49c6p4n9lmmq0iq5pq-source","ref":"20231019","rev":"a2dc5becc0596d52ab205d869b7c167c0b562fb4","sha256":"09jygr7ynzh6vp2p54dgq2qz651d3lgvypkjwjp74zzp3jgwz7g5","srcDir":"src","url":"https://git.syndicate-lang.org/ehmry/preserves-nim/archive/a2dc5becc0596d52ab205d869b7c167c0b562fb4.tar.gz"}]}

View File

@@ -3,6 +3,6 @@ bin = @["nim_lk"]
description = "Tool for generating Nim lockfiles" description = "Tool for generating Nim lockfiles"
license = "BSD-3-Clause" license = "BSD-3-Clause"
srcDir = "src" srcDir = "src"
version = "20231009" version = "20231020"
requires "nim >= 2.0.0" requires "nim >= 2.0.0", "preserves >= 20231020"

View File

@@ -8,10 +8,13 @@ import nimblepkg/common,
nimblepkg/version nimblepkg/version
import std/[algorithm, deques, httpclient, json, os, osproc, parseutils, streams, strutils, uri] import std/[algorithm, deques, httpclient, json, os, osproc, parseutils, streams, strutils, uri]
import preserves
const githubPackagesUrl = const githubPackagesUrl =
"https://raw.githubusercontent.com/nim-lang/packages/master/packages.json" "https://raw.githubusercontent.com/nim-lang/packages/master/packages.json"
type Preserve = preserves.Preserve[void]
proc registryCachePath: string = proc registryCachePath: string =
result = getEnv("XDG_CACHE_HOME") result = getEnv("XDG_CACHE_HOME")
if result == "": if result == "":
@@ -78,8 +81,17 @@ proc matchRev(url: string; wanted: VersionRange): tuple[tag: string, rev: string
result = pairs[pairs.high] result = pairs[pairs.high]
doAssert result.rev != "", url doAssert result.rev != "", url
proc collectMetadata(data: JsonNode) = proc `[]`(dict: Preserve; key: string): Preserve =
let storePath = data["path"].getStr dict[key.toPreserve]
proc `[]=`(dict: var Preserve; key: string; val: Preserve) =
dict[key.toPreserve] = val
proc `[]=`(dict: var Preserve; key: string; val: string) =
dict[key.toPreserve] = val.toPreserve
proc collectMetadata(data: var Preserve) =
let storePath = data["path"].string
var packageNames = newSeq[string]() var packageNames = newSeq[string]()
for (kind, path) in walkDir(storePath): for (kind, path) in walkDir(storePath):
if kind in {pcFile, pcLinkToFile} and path.endsWith(".nimble"): if kind in {pcFile, pcLinkToFile} and path.endsWith(".nimble"):
@@ -88,13 +100,13 @@ proc collectMetadata(data: JsonNode) =
if packageNames.len == 0: if packageNames.len == 0:
quit("no .nimble files found in " & storePath) quit("no .nimble files found in " & storePath)
sort(packageNames) sort(packageNames)
data["packages"] = %packageNames data["packages"] = packageNames.toPreserve(void)
var var
nimbleFilePath = findNimbleFile(storePath, true) nimbleFilePath = findNimbleFile(storePath, true)
pkg = readPackageInfo(nimbleFilePath, parseCmdLine()) pkg = readPackageInfo(nimbleFilePath, parseCmdLine())
data["srcDir"] = %pkg.srcDir data["srcDir"] = pkg.srcDir.toPreserve
proc prefetchGit(uri: Uri; version: VersionRange): JsonNode = proc prefetchGit(uri: Uri; version: VersionRange): Preserve =
var var
uri = uri uri = uri
subdir = "" subdir = ""
@@ -128,14 +140,14 @@ proc prefetchGit(uri: Uri; version: VersionRange): JsonNode =
off.inc parseUntil(lines, hash, {'\n'}, off).succ off.inc parseUntil(lines, hash, {'\n'}, off).succ
off.inc parseUntil(lines, storePath, {'\n'}, off).succ off.inc parseUntil(lines, storePath, {'\n'}, off).succ
doAssert off == lines.len, "unrecognized nix-prefetch-url output:\n" & lines doAssert off == lines.len, "unrecognized nix-prefetch-url output:\n" & lines
result = newJObject() result = initDictionary()
result["method"] = %"fetchzip" result["method"] = "fetchzip"
result["path"] = %storePath result["path"] = storePath
result["rev"] = %rev result["rev"] = rev
result["sha256"] = %hash result["sha256"] = hash
result["url"] = %archiveUrl result["url"] = archiveUrl
if subdir != "": if subdir != "":
result["subdir"] = %* subdir result["subdir"] = subdir
else: else:
stderr.writeLine "fetch of ", archiveUrl, " returned ", resp.code stderr.writeLine "fetch of ", archiveUrl, " returned ", resp.code
var args = @["--quiet", "--fetch-submodules", "--url", cloneUrl, "--rev", rev] var args = @["--quiet", "--fetch-submodules", "--url", cloneUrl, "--rev", rev]
@@ -144,26 +156,26 @@ proc prefetchGit(uri: Uri; version: VersionRange): JsonNode =
"nix-prefetch-git", "nix-prefetch-git",
args = args, args = args,
options = {poUsePath}) options = {poUsePath})
try: result = parseJson dump try: result = parsePreserves dump
except JsonParsingError: except CatchableError:
stderr.writeLine "failed to parse output of nix-prefetch-git ", join(args, " ") stderr.writeLine "failed to parse output of nix-prefetch-git ", join(args, " ")
quit(dump) quit(dump)
if subdir != "": if subdir != "":
result["subdir"] = %* subdir result["subdir"] = subdir
result["method"] = %"git" result["method"] = "git"
if tag != "": if tag != "":
result["ref"] = %tag result["ref"] = tag
collectMetadata(result) collectMetadata(result)
proc containsPackageUri(lockAttrs: JsonNode; pkgUri: string): bool = proc containsPackageUri(lockAttrs: Preserve; pkgUri: string): bool =
for e in lockAttrs.items: for e in lockAttrs.items:
if e["url"].getStr == pkgUri: if e["url".toPreserve].string == pkgUri:
return true return true
proc containsPackage(lockAttrs: JsonNode; pkgName: string): bool = proc containsPackage(lockAttrs: Preserve; pkgName: string): bool =
for e in lockAttrs.items: for e in lockAttrs.items:
for other in e["packages"].items: for other in e["packages"].items:
if pkgName == other.getStr: if pkgName == other.string:
return true return true
proc collectRequires(pending: var Deque[PkgTuple]; pkgPath: string) = proc collectRequires(pending: var Deque[PkgTuple]; pkgPath: string) =
@@ -205,16 +217,16 @@ proc getPackgeUri(name: string): tuple[uri: string, meth: string] =
quit("Failed to parse shit JSON " & $e) quit("Failed to parse shit JSON " & $e)
inc i inc i
proc generateLockfile(): JsonNode = proc generateLockfile(): Preserve =
result = newJObject() result = initDictionary()
var var
deps = newJArray() deps = initSequence()
pending: Deque[PkgTuple] pending: Deque[PkgTuple]
collectRequires(pending, getCurrentDir()) collectRequires(pending, getCurrentDir())
while pending.len > 0: while pending.len > 0:
let batchLen = pending.len let batchLen = pending.len
for i in 1..batchLen: for i in 1..batchLen:
var pkgData: JsonNode var pkgData: Preserve
let pkg = pending.popFirst() let pkg = pending.popFirst()
if pkg.name == "nim" or pkg.name == "compiler": if pkg.name == "nim" or pkg.name == "compiler":
continue continue
@@ -227,12 +239,12 @@ proc generateLockfile(): JsonNode =
pkgData = prefetchGit(uri, pkg.ver) pkgData = prefetchGit(uri, pkg.ver)
else: else:
quit("unhandled URI " & $uri) quit("unhandled URI " & $uri)
collectRequires(pending, pkgData["path"].getStr) collectRequires(pending, pkgData["path"].string)
deps.add pkgData deps.sequence.add pkgData
if batchLen == pending.len: if batchLen == pending.len:
var var
pkgData: JsonNode pkgData: Preserve
pkg = pending.popFirst() pkg = pending.popFirst()
info = getPackgeUri(pkg.name) info = getPackgeUri(pkg.name)
uri = parseUri info.uri uri = parseUri info.uri
@@ -241,13 +253,14 @@ proc generateLockfile(): JsonNode =
pkgData = prefetchGit(uri, pkg.ver) pkgData = prefetchGit(uri, pkg.ver)
else: else:
quit("unhandled fetch method " & $info.meth & " for " & info.uri) quit("unhandled fetch method " & $info.meth & " for " & info.uri)
collectRequires(pending, pkgData["path"].getStr) collectRequires(pending, pkgData["path"].string)
deps.add pkgData deps.sequence.add pkgData
sort(deps.elems) sort(deps.sequence)
result["depends"] = deps result["depends".toPreserve] = deps
proc main = proc main =
var lockInfo = generateLockfile() var lockInfo = generateLockfile()
cannonicalize(lockInfo)
stdout.writeLine lockInfo stdout.writeLine lockInfo
main() main()