Cannonicalize output

Ehmry - 2023-10-20 19:55:29 +01:00
parent 4df3728a62
commit aa67840d45
6 changed files with 50 additions and 37 deletions

.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
+/nim.cfg

View File

@@ -1,2 +0,0 @@
-include_rules
-: |> !nim_lk |>

View File

@@ -1 +1,2 @@
 NIM_FLAGS += --path:$(TUP_CWD)/../nim/v1.6.8
+NIM_FLAGS += --path:$(TUP_CWD)/../preserves-nim/src

View File

@@ -1 +1 @@
-{"depends":[]}
+{"depends":[{"method":"fetchzip","packages":["npeg"],"path":"/nix/store/ffkxmjmigfs7zhhiiqm0iw2c34smyciy-source","ref":"1.2.1","rev":"26d62fdc40feb84c6533956dc11d5ee9ea9b6c09","sha256":"0xpzifjkfp49w76qmaylan8q181bs45anmp46l4bwr3lkrr7bpwh","srcDir":"src","url":"https://github.com/zevv/npeg/archive/26d62fdc40feb84c6533956dc11d5ee9ea9b6c09.tar.gz"},{"method":"fetchzip","packages":["preserves"],"path":"/nix/store/nrxd0z8mxmdphw49c6p4n9lmmq0iq5pq-source","ref":"20231019","rev":"a2dc5becc0596d52ab205d869b7c167c0b562fb4","sha256":"09jygr7ynzh6vp2p54dgq2qz651d3lgvypkjwjp74zzp3jgwz7g5","srcDir":"src","url":"https://git.syndicate-lang.org/ehmry/preserves-nim/archive/a2dc5becc0596d52ab205d869b7c167c0b562fb4.tar.gz"}]}

View File

@@ -3,6 +3,6 @@ bin = @["nim_lk"]
 description = "Tool for generating Nim lockfiles"
 license = "BSD-3-Clause"
 srcDir = "src"
-version = "20231009"
+version = "20231020"
 
-requires "nim >= 2.0.0"
+requires "nim >= 2.0.0", "preserves >= 20231020"

View File

@@ -8,10 +8,13 @@ import nimblepkg/common,
   nimblepkg/version
 
 import std/[algorithm, deques, httpclient, json, os, osproc, parseutils, streams, strutils, uri]
+import preserves
 
 const githubPackagesUrl =
   "https://raw.githubusercontent.com/nim-lang/packages/master/packages.json"
 
+type Preserve = preserves.Preserve[void]
+
 proc registryCachePath: string =
   result = getEnv("XDG_CACHE_HOME")
   if result == "":
@@ -78,8 +81,17 @@ proc matchRev(url: string; wanted: VersionRange): tuple[tag: string, rev: string
     result = pairs[pairs.high]
   doAssert result.rev != "", url
 
-proc collectMetadata(data: JsonNode) =
-  let storePath = data["path"].getStr
+proc `[]`(dict: Preserve; key: string): Preserve =
+  dict[key.toPreserve]
+
+proc `[]=`(dict: var Preserve; key: string; val: Preserve) =
+  dict[key.toPreserve] = val
+
+proc `[]=`(dict: var Preserve; key: string; val: string) =
+  dict[key.toPreserve] = val.toPreserve
+
+proc collectMetadata(data: var Preserve) =
+  let storePath = data["path"].string
   var packageNames = newSeq[string]()
   for (kind, path) in walkDir(storePath):
     if kind in {pcFile, pcLinkToFile} and path.endsWith(".nimble"):
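Note: the three helpers added above give Preserves dictionaries the same string-keyed ergonomics the old JsonNode code had. A hedged sketch of how they are used, assuming only the preserves-nim calls that appear in this diff (initDictionary, toPreserve, and the string conversion used by writeLine in main); everything else is illustrative:

    import preserves

    type Preserve = preserves.Preserve[void]

    # String-key setter, reproduced from the diff so the sketch is self-contained.
    proc `[]=`(dict: var Preserve; key: string; val: string) =
      dict[key.toPreserve] = val.toPreserve

    var dep = initDictionary()                           # empty Preserves dictionary
    dep["method"] = "fetchzip"                           # reads like the old JsonNode code
    dep["url"] = "https://example.org/source.tar.gz"     # illustrative URL
    stdout.writeLine dep                                 # prints the Preserves text form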
@@ -88,13 +100,13 @@ proc collectMetadata(data: JsonNode) =
   if packageNames.len == 0:
     quit("no .nimble files found in " & storePath)
   sort(packageNames)
-  data["packages"] = %packageNames
+  data["packages"] = packageNames.toPreserve(void)
   var
     nimbleFilePath = findNimbleFile(storePath, true)
     pkg = readPackageInfo(nimbleFilePath, parseCmdLine())
-  data["srcDir"] = %pkg.srcDir
+  data["srcDir"] = pkg.srcDir.toPreserve
 
-proc prefetchGit(uri: Uri; version: VersionRange): JsonNode =
+proc prefetchGit(uri: Uri; version: VersionRange): Preserve =
   var
     uri = uri
     subdir = ""
@@ -128,14 +140,14 @@ proc prefetchGit(uri: Uri; version: VersionRange): JsonNode =
     off.inc parseUntil(lines, hash, {'\n'}, off).succ
     off.inc parseUntil(lines, storePath, {'\n'}, off).succ
     doAssert off == lines.len, "unrecognized nix-prefetch-url output:\n" & lines
-    result = newJObject()
-    result["method"] = %"fetchzip"
-    result["path"] = %storePath
-    result["rev"] = %rev
-    result["sha256"] = %hash
-    result["url"] = %archiveUrl
+    result = initDictionary()
+    result["method"] = "fetchzip"
+    result["path"] = storePath
+    result["rev"] = rev
+    result["sha256"] = hash
+    result["url"] = archiveUrl
     if subdir != "":
-      result["subdir"] = %* subdir
+      result["subdir"] = subdir
   else:
     stderr.writeLine "fetch of ", archiveUrl, " returned ", resp.code
     var args = @["--quiet", "--fetch-submodules", "--url", cloneUrl, "--rev", rev]
@@ -144,26 +156,26 @@ proc prefetchGit(uri: Uri; version: VersionRange): JsonNode =
       "nix-prefetch-git",
       args = args,
       options = {poUsePath})
-    try: result = parseJson dump
-    except JsonParsingError:
+    try: result = parsePreserves dump
+    except CatchableError:
       stderr.writeLine "failed to parse output of nix-prefetch-git ", join(args, " ")
       quit(dump)
     if subdir != "":
-      result["subdir"] = %* subdir
-    result["method"] = %"git"
+      result["subdir"] = subdir
+    result["method"] = "git"
   if tag != "":
-    result["ref"] = %tag
+    result["ref"] = tag
   collectMetadata(result)
 
-proc containsPackageUri(lockAttrs: JsonNode; pkgUri: string): bool =
+proc containsPackageUri(lockAttrs: Preserve; pkgUri: string): bool =
   for e in lockAttrs.items:
-    if e["url"].getStr == pkgUri:
+    if e["url".toPreserve].string == pkgUri:
       return true
 
-proc containsPackage(lockAttrs: JsonNode; pkgName: string): bool =
+proc containsPackage(lockAttrs: Preserve; pkgName: string): bool =
   for e in lockAttrs.items:
     for other in e["packages"].items:
-      if pkgName == other.getStr:
+      if pkgName == other.string:
        return true
 
 proc collectRequires(pending: var Deque[PkgTuple]; pkgPath: string) =
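Note: parseJson becomes parsePreserves here because the JSON that nix-prefetch-git prints is also syntactically valid Preserves text, so the dump can be read straight into a Preserve value. A small sketch of that round trip, with a string literal standing in for the real tool output (the field values are made up):

    import preserves

    type Preserve = preserves.Preserve[void]

    # Imitation of a fragment of nix-prefetch-git output; values are fake.
    let dump = """{"url": "https://example.org/repo", "rev": "deadbeef", "sha256": "0000"}"""
    let fetched: Preserve = parsePreserves(dump)
    echo fetched["url".toPreserve]   # the url field, as a Preserves string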
@@ -205,16 +217,16 @@ proc getPackgeUri(name: string): tuple[uri: string, meth: string] =
       quit("Failed to parse shit JSON " & $e)
     inc i
 
-proc generateLockfile(): JsonNode =
-  result = newJObject()
+proc generateLockfile(): Preserve =
+  result = initDictionary()
   var
-    deps = newJArray()
+    deps = initSequence()
     pending: Deque[PkgTuple]
   collectRequires(pending, getCurrentDir())
   while pending.len > 0:
     let batchLen = pending.len
     for i in 1..batchLen:
-      var pkgData: JsonNode
+      var pkgData: Preserve
       let pkg = pending.popFirst()
       if pkg.name == "nim" or pkg.name == "compiler":
         continue
@@ -227,12 +239,12 @@ proc generateLockfile(): JsonNode =
         pkgData = prefetchGit(uri, pkg.ver)
       else:
         quit("unhandled URI " & $uri)
-      collectRequires(pending, pkgData["path"].getStr)
-      deps.add pkgData
+      collectRequires(pending, pkgData["path"].string)
+      deps.sequence.add pkgData
 
     if batchLen == pending.len:
       var
-        pkgData: JsonNode
+        pkgData: Preserve
         pkg = pending.popFirst()
         info = getPackgeUri(pkg.name)
         uri = parseUri info.uri
@@ -241,13 +253,14 @@
         pkgData = prefetchGit(uri, pkg.ver)
       else:
         quit("unhandled fetch method " & $info.meth & " for " & info.uri)
-      collectRequires(pending, pkgData["path"].getStr)
-      deps.add pkgData
-  sort(deps.elems)
-  result["depends"] = deps
+      collectRequires(pending, pkgData["path"].string)
+      deps.sequence.add pkgData
+  sort(deps.sequence)
+  result["depends".toPreserve] = deps
 
 proc main =
   var lockInfo = generateLockfile()
+  cannonicalize(lockInfo)
   stdout.writeLine lockInfo
 
 main()
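Note: together with sort(deps.sequence), the added cannonicalize call is what the commit title refers to: the lock data is put into Preserves canonical form before printing, so the printed output does not depend on the order in which entries were built. A minimal sketch of the idea, assuming only the calls that appear in this diff (initDictionary, toPreserve, cannonicalize) and that cannonicalize mutates its argument, as its use in main suggests; the values are made up:

    import preserves

    type Preserve = preserves.Preserve[void]

    # String-key setter, reproduced from the diff so the sketch is self-contained.
    proc `[]=`(dict: var Preserve; key: string; val: string) =
      dict[key.toPreserve] = val.toPreserve

    # Two dictionaries with the same contents, built in different key orders.
    var a = initDictionary()
    a["rev"] = "deadbeef"
    a["url"] = "https://example.org/repo"

    var b = initDictionary()
    b["url"] = "https://example.org/repo"
    b["rev"] = "deadbeef"

    cannonicalize(a)
    cannonicalize(b)
    stdout.writeLine a   # both lines should now be identical,
    stdout.writeLine b   # regardless of insertion order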