{ pkgs, stdenv, lib, haskellLib, recurseIntoAttrs, srcOnly }:
with haskellLib;
let
# Why `final.buildPackages.buildPackages.gitMinimal`?
# Why not just final.buildPackages.gitMinimal?
#
# It turns out `git` depends on `gdb` in a roundabout way:
# git -> openssh -> libfido2 -> systemd -> python libxml -> Cython -> gdb
# Somewhere in that chain there should perhaps be a `buildPackages` so
# that the `gdb` that is used is not the one for debugging code in
# the `final` (but instead the one for debugging code in
# `final.buildPackages`).
#
# Using `final.buildPackages.git` causes two problems:
#
# * Multiple versions of `git` (and of that dependency chain
# to `gdb`) are needed when cross compiling.
# * `gdb` does not exist for `js`, so when cross compiling
# with ghcjs `final.buildPackages.git` fails to build at all.
inherit (pkgs.buildPackages.buildPackages) gitMinimal;
in {
# Within the package components, these are the attribute names of
# nested attrsets.
subComponentTypes = [
"sublibs"
"foreignlibs"
"exes"
"tests"
"benchmarks"
];
foldrAttrVals = f: z: attrs:
lib.foldr f z (builtins.attrValues attrs);
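# Fold `f` over every component of the given component types `tys` in a
# package config `conf` (including its library component, when present),
# starting from the accumulator `z`.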
foldComponents = tys: f: z: conf:
let
comps = conf.components or { };
# ensure that comps.library exists and is not null.
libComp = acc:
if comps ? library then f comps.library acc else acc;
subComps = acc:
lib.foldr
(ty: acc': foldrAttrVals f acc' (comps.${ty} or { }))
acc
tys;
in
libComp (subComps z);
getAllComponents = foldComponents subComponentTypes (c: acc: [ c ] ++ acc) [ ];
componentPrefix = {
sublibs = "lib";
foreignlibs = "flib";
exes = "exe";
tests = "test";
benchmarks = "bench";
};
# For looking up the components attribute based on the cabal component type
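# For example, the resulting mapping includes:
#
# prefixComponent.exe == "exes"
# prefixComponent.lib == "sublibs"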
prefixComponent =
lib.listToAttrs (
lib.mapAttrsToList (value: name: { inherit name value; })
componentPrefix);
applyComponents = f: config:
let
comps = config.components;
applyLibrary = cname: f { cname = config.package.identifier.name; ctype = "lib"; };
applySubComp = ctype: cname: f { inherit cname; ctype = componentPrefix.${ctype} or (throw "Missing component mapping for ${ctype}."); };
isBuildable = comp:
config.package.buildable # Set manually in a module (allows whole packages to be disabled)
&& comp.buildable # Set based on `buildable` in `.cabal` files
&& comp.planned; # Set if the component was in the `plan.json`
buildableAttrs = lib.filterAttrs (_n: isBuildable);
libComp = if comps.library == null || !(isBuildable comps.library)
then {}
else lib.mapAttrs applyLibrary (removeAttrs comps (subComponentTypes ++ [ "setup" ]));
subComps = lib.mapAttrs
(ctype: attrs: lib.mapAttrs (applySubComp ctype) (buildableAttrs attrs))
(builtins.intersectAttrs (lib.genAttrs subComponentTypes (_: null)) comps);
in subComps // libComp;
isLibrary = componentId: componentId.ctype == "lib";
isExe = componentId: componentId.ctype == "exe";
isTest = componentId: componentId.ctype == "test";
isBenchmark = componentId: componentId.ctype == "bench";
isExecutableType = componentId:
isExe componentId
|| isTest componentId
|| isBenchmark componentId;
mayHaveExecutable = isExecutableType;
# Was there a reference to the package source in the `cabal.project` or `stack.yaml` file?
# This is used to make the default `packages` list for `shellFor`.
isLocalPackage = p: p.isLocal or false;
isRedirectPackage = p: p.isRedirect or false;
selectLocalPackages = lib.filterAttrs (_n: p: p != null && isLocalPackage p && !isRedirectPackage p);
# If it's a project package, it has a `src` attribute set with an `origSubDir` attribute.
# Project packages are a subset of the local packages.
isProjectPackage = p: p.isProject or false;
selectProjectPackages = lib.filterAttrs (_n: p: p != null && isLocalPackage p && isProjectPackage p && !isRedirectPackage p);
# Format a componentId as it should appear as a target on the
# command line of the setup script.
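# For example (with an illustrative component id):
#
# componentTarget { ctype = "exe"; cname = "my-exe"; } == "exe:my-exe"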
componentTarget = componentId:"${componentId.ctype}:${componentId.cname}";
# Remove null or empty values from an attrset.
optionalHooks = lib.filterAttrs (_: hook: hook != null && hook != "");
# Avoid pkgs.callPackage for now. It does a lot of nonsense with OOP
# style programming that we should avoid until we know we want it.
# weakCallPackage: call a function or (importable expression)
# with scope + args.
#
# weakCallPackage scope f args
# will call f (scope // args)
#
# weakCallPackage scope ./path args
# will call the expression at ./path with (scope // args)
#
weakCallPackage = scope: f: args:
let f' = if lib.isFunction f then f else import f;
args' = (builtins.intersectAttrs (builtins.functionArgs f') scope) // args;
in f' args';
# Collect all (transitive) Haskell library dependencies of a
# component.
## flatLibDepends :: Component -> [Package]
flatLibDepends = component:
let
makePairs = map (p: rec { key=val.name; val=p.components.library or p; });
closure = builtins.genericClosure {
startSet = makePairs component.depends;
operator = {val,...}: makePairs val.config.depends;
};
in map ({val,...}: val) closure;
# Extracts a selection of components from a Haskell package set.
#
# This can be used to select all test suites or benchmarks of
# your project, so that they can be built in Hydra.
#
# For example:
#
# tests = collectComponents "tests" (package: package.identifier.name == "mypackage") hsPkgs;
#
# Will result in moving:
# from: hsPkgs.mypackage.components.tests.unit-tests
# to: tests.mypackage.unit-tests
#
collectComponents = group: packageSel: haskellPackages:
let packageToComponents = _name: package:
# look for the components with this group if there are any
let components = package.components.${group} or {};
# set recurseForDerivations unless it's a derivation itself (e.g. the "library" component) or an empty set
in if lib.isDerivation components || components == {}
then components
else recurseIntoAttrs components;
packageFilter = _name: package: (package.isHaskell or false) && packageSel package;
filteredPkgs = lib.filterAttrs packageFilter haskellPackages;
# at this point we can filter out packages that don't have any of the given kind of component
packagesByComponent = lib.filterAttrs (_: components: components != {}) (lib.mapAttrs packageToComponents filteredPkgs);
in recurseIntoAttrs packagesByComponent;
# Equivalent to collectComponents with (_: true) as selection function.
# Useful for a pre-filtered package set.
#
# For example:
#
# myHaskellPackages = selectProjectPackages hsPkgs;
# myTests = collectComponents' "tests" myHaskellPackages;
collectComponents' = group: collectComponents group (_: true);
# Extracts a selection of 'checks' from a Haskell package set.
#
# This can be used to collect all the test runs in your project, so that they can be run in CI.
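# For example ("mypackage" is an illustrative package name):
#
# checks = collectChecks (package: package.identifier.name == "mypackage") hsPkgs;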
collectChecks = packageSel: haskellPackages:
let packageFilter = _name: package: (package.isHaskell or false) && packageSel package;
in recurseIntoAttrs (lib.mapAttrs (_: p: p.checks) (lib.filterAttrs packageFilter haskellPackages));
# Equivalent to collectChecks with (_: true) as selection function.
# Useful for a pre-filtered package set.
collectChecks' = collectChecks (_: true);
# Replacement for lib.cleanSourceWith that has a subDir argument.
inherit (import ./clean-source-with.nix { inherit lib; }) cleanSourceWith;
# Use cleanSourceWith to filter just the files needed for a particular
# component of a package
cleanCabalComponent = import ./clean-cabal-component.nix { inherit lib cleanSourceWith; };
# Clean git directory based on `git ls-files --recurse-submodules`
cleanGit = import ./clean-git.nix {
inherit lib cleanSourceWith;
git = gitMinimal;
inherit (pkgs.buildPackages.buildPackages) runCommand;
};
# Sometimes it is handy to temporarily use a relative path between git
# repos. If the repos are individually cleaned this is not possible
# (since the cleaned version of one repo will never include the files
# of the other).
#
# `cleanGits` allows us to specify a root directory and any number of
# sub directories containing git repos.
#
# See docs/user-guide/clean-git.md for details of how to use this
# with `cabalProject`.
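# For example (a minimal sketch; the directory names are illustrative):
#
# src = cleanGits {
#   src = ../.;
#   gitDirs = [ "repo-a" "repo-b" ];
#   name = "my-workspace";
# };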
cleanGits = { src, gitDirs, name ? null, caller ? "cleanGits" }:
let
# List of filters, one for each git directory.
filters = builtins.map (subDir:
(pkgs.haskell-nix.haskellLib.cleanGit {
src = pkgs.haskell-nix.haskellLib.cleanSourceWith {
inherit src subDir;
};
}).filter) gitDirs;
in pkgs.haskell-nix.haskellLib.cleanSourceWith {
inherit src name caller;
# Keep files that match any of the filters
filter = path: type: pkgs.lib.any (f: f path type) filters;
};
# Check a test component
check = import ./check.nix {
inherit stdenv lib haskellLib;
inherit (pkgs) buildPackages;
};
# Do coverage of a package
coverageReport = import ./cover.nix {
inherit stdenv lib haskellLib pkgs;
};
# Do coverage of a project
projectCoverageReport = import ./cover-project.nix {
inherit stdenv lib haskellLib pkgs;
};
# Use `isCrossHost` to identify when we are cross compiling and
# the code we are producing will not run on the build system
# without an emulator.
# In most cases we do not want to treat musl as a cross compiler.
# For instance when building ghc we want to include ghci.
isCrossHost = stdenv.hostPlatform != stdenv.buildPlatform
&& !(stdenv.buildPlatform.isLinux && stdenv.hostPlatform.isMusl && stdenv.buildPlatform.linuxArch == stdenv.hostPlatform.linuxArch);
# This is the same as isCrossHost but for use when building ghc itself
isCrossTarget = stdenv.targetPlatform != stdenv.hostPlatform
&& !(stdenv.hostPlatform.isLinux && stdenv.targetPlatform.isMusl && stdenv.hostPlatform.linuxArch == stdenv.targetPlatform.linuxArch);
# Native musl build-host-target combo
isNativeMusl = stdenv.hostPlatform.isMusl
&& stdenv.buildPlatform == stdenv.hostPlatform
&& stdenv.hostPlatform == stdenv.targetPlatform;
# Takes a version number, module or list of modules (for cabalProject)
# and converts it to a list of project modules. This allows
# the use of "1.0.0.0" or { version = "1.0.0.0"; ... }
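# For example:
#
# versionOrModToMods "1.0.0.0" == [ { version = "1.0.0.0"; } ]
# versionOrModToMods { version = "1.0.0.0"; } == [ { version = "1.0.0.0"; } ]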
versionOrModToMods = versionOrMod:
if lib.isString versionOrMod
then [{ version = versionOrMod; }]
else if lib.isList versionOrMod
then versionOrMod
else [versionOrMod];
# Find the resolver in the stack.yaml file and fetch it if a sha256 value is provided
fetchResolver = import ./fetch-resolver.nix {
inherit (pkgs.buildPackages) pkgs;
};
inherit (import ./cabal-project-parser.nix {
inherit pkgs;
}) parseIndexState parseSourceRepositoryPackages parseRepositories parseSourceRepositoryPackageBlock parseRepositoryBlock;
cabalToNixpkgsLicense = import ./spdx/cabal.nix pkgs;
# This function is like
# `src + (if subDir == "" then "" else "/" + subDir)`
# however when `includeSiblings` is set it maintains
# `src.origSrc` if there is one and instead adds to
# `src.origSubDir`. It uses `cleanSourceWith` when possible
# to keep `cleanSourceWith` support in the result.
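# For example ("subdir" is an illustrative directory name):
#
# appendSubDir { src = ./.; subDir = "subdir"; }
#
# behaves roughly like `./. + "/subdir"`, but keeps `cleanSourceWith` support.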
appendSubDir = { src, subDir, includeSiblings ? false }:
if subDir == ""
then src
else
haskellLib.cleanSourceWith {
inherit src subDir includeSiblings;
};
# Given a `src`, split it into a `root` path (based on `src.origSrc` if
# present) and a `subDir` (based on `src.origSubDir`). The
# `root` will still use the `filter` of `src` if there was one.
rootAndSubDir = src: rec {
subDir = src.origSubDir or "";
root =
if subDir == ""
then src # if there was no subdir use the original src
else
# Use `cleanSourceWith` to make sure the `filter` is still used
if src ? origSrc && src ? filter
then haskellLib.cleanSourceWith {
name = src.name or "source" + "-root";
src = src.origSrc;
# Not passing src.origSubDir so that the result points to `origSrc`
inherit (src) filter;
}
else src.origSrc or src; # If there is a subDir and origSrc (but no filter) use origSrc
};
# Run evalModules passing the project function argument (m) as a module along with
# the projectType module (../modules/cabal-project.nix or ../modules/stack-project.nix).
# The resulting config is then passed to the project function's implementation.
evalProjectModule = projectType: m: f:
let project = f
(lib.evalModules {
modules = (if builtins.isList m then m else [m]) ++ [
# Include ../modules/cabal-project.nix or ../modules/stack-project.nix
(import ../modules/project-common.nix)
(import projectType)
# Pass the pkgs and the buildProject to the modules
({ config, lib, ... }: {
_module.args = {
inherit pkgs;
# to make it easy to depend on build packages in, e.g., a shell definition:
inherit (project) buildProject;
};
inherit (project) hsPkgs;
})
];
});
in project;
# Converts from a `component.depends` value to a library derivation.
# In the case of sublibs the `depends` value should already be the derivation.
dependToLib = d:
# Do not simplify this to `d.components.library or d`, as that
# will not give a good error message if the `.library`
# is missing (happens if the package is unplanned,
# but has overrides).
# It would be nice to put an `assert` here, but there is
# currently no good way to get the name of the dependency
# when it is not in the plan. The attribute path of
# `d` in the `nix` error should include the name
# eg. `packages.Cabal.components.library`.
if d ? components
then d.components.library
else d;
projectOverlays = import ./project-overlays.nix {
inherit lib haskellLib;
};
# Used by `prefixFlake` to add a prefix to every attribute
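# For example:
#
# prefixAttrs "hello:" { world = 1; } == { "hello:world" = 1; }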
prefixAttrs = prefix: x:
__listToAttrs (map (n:{
name = prefix + n;
value = x.${n};
}) (__attrNames x));
# Used by `combineFlakes` to add the prefix to each flake
prefixFlake = prefix: sep: flake:
if prefix == "default"
then flake
else
__mapAttrs (_: prefixAttrs (prefix + sep)) flake //
lib.optionalAttrs (flake ? devShell) {
# We can't add the prefix to this
inherit (flake) devShell;
} // lib.optionalAttrs (flake ? devShells) {
devShells = __listToAttrs (map (n: {
# We don't want ":default" on the end of the non
# default dev shells
name = if n == "default"
then prefix
else prefix + sep + n;
value = flake.devShells.${n};
}) (__attrNames flake.devShells));
} // lib.optionalAttrs (flake ? hydraJobs) {
hydraJobs.${lib.removeSuffix ":" prefix} = flake.hydraJobs;
} // lib.optionalAttrs (flake ? ciJobs) {
ciJobs.${lib.removeSuffix ":" prefix} = flake.ciJobs;
};
# Used by `combineFlakes` to combine flakes together
addFlakes = a: b:
__listToAttrs (map (name: {
inherit name;
value =
# This favours the first item (`a`) in the case of duplicates
# so that `combineFlakes` will use the first flake in the
# list for `devShell`.
if name == "devShell"
then a.devShell or b.devShell # `devShell` is a derivation
else
(b.${name} or {}) // (a.${name} or {});
}) (__attrNames (a // b)));
# This function can combine a list of flakes along with
# suitable prefix values into a single flake.
# Since there is no way to add a prefix to `devShell`, the first
# one in the list will be used.
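# For example (a minimal sketch; `flakeA` and `flakeB` are illustrative):
#
# outputs = combineFlakes ":" { default = flakeA; extra = flakeB; };
#
# Outputs of `flakeA` keep their names, while outputs of `flakeB`
# (other than `devShell`) get an "extra:" prefix.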
combineFlakes = sep: prefixAndFlakes: builtins.foldl' addFlakes {}
(lib.mapAttrsToList (prefix: flake: prefixFlake prefix sep flake) prefixAndFlakes);
# Make the CI jobs for running code coverage.
# `project` is the base project without code coverage enabled.
# `packages` is a selector function that indicates what packages
# we should run code coverage on (pass haskellLib.selectProjectPackages
# to run it on the packages).
# `coverageProjectModule` is applied to `project` and is useful for
# modifying the project settings when running code coverage (just
# pass `{}` if you do not need to modify anything).
# By default the `doCoverage` flag will be set for the packages
# selected by `packages`.
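# For example (a minimal sketch):
#
# coverageJobs = projectCoverageCiJobs project haskellLib.selectProjectPackages {};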
projectCoverageCiJobs = project: packages: coverageProjectModule:
let
packageNames = project: builtins.attrNames (packages project.hsPkgs);
coverageProject = project.appendModule [
coverageProjectModule
{
modules = [{
packages = lib.genAttrs (packageNames project)
(_: { doCoverage = lib.mkDefault true; });
}];
}
];
in
builtins.listToAttrs (lib.concatMap (packageName: [{
name = packageName;
value = coverageProject.hsPkgs.${packageName}.coverageReport;
}]) (packageNames coverageProject));
# Flake package names that are flat and match the cabal component names.
mkFlakePackages =
foldrAttrVals
(package: acc:
foldComponents
subComponentTypes
(component: a: a // {
${component.passthru.identifier.component-id} = component;
})
acc
package)
{ };
# Flake app names that are flat and match the cabal component names.
mkFlakeApps =
foldrAttrVals
(package: acc:
foldComponents
[ "exes" "tests" "benchmarks" ]
(component: a: a // {
${component.passthru.identifier.component-id} = {
type = "app";
program = component.exePath;
};
})
acc
package)
{ };
# Flatten the result of collectChecks or collectChecks' for use in flake `checks`
mkFlakeChecks = allChecks:
foldrAttrVals
(package: acc:
foldrAttrVals
(check: a: a // {
${check.passthru.identifier.component-id} = check;
})
acc
package)
{ }
(removeRecurseForDerivations allChecks);
removeRecurseForDerivations = x:
let clean = builtins.removeAttrs x ["recurseForDerivations"];
in
if x.recurseForDerivations or false
then builtins.mapAttrs (_: removeRecurseForDerivations) clean
else clean;
mkFlakeCiJobs = project: {
packages
, checks
, coverage
, devShells
, checkedProject
}: {
# Run all the tests and code coverage
checks = removeRecurseForDerivations checks;
inherit
coverage
# Make sure all the packages build
packages
# Build and cache any tools in the `devShells`
devShells;
# Build tools and cache tools needed for the project
inherit (project) roots;
}
# Build the plan-nix and check it if materialized
// lib.optionalAttrs (checkedProject ? plan-nix) {
inherit (checkedProject) plan-nix;
}
# Build the stack-nix and check it if materialized
// lib.optionalAttrs (checkedProject ? stack-nix) {
inherit (checkedProject) stack-nix;
};
mkFlake = project: {
selectPackages ? haskellLib.selectProjectPackages
, haskellPackages ? selectPackages project.hsPkgs
, packages ? mkFlakePackages haskellPackages
, apps ? mkFlakeApps haskellPackages
, checks ? mkFlakeChecks (collectChecks' haskellPackages)
, coverage ? {}
, devShell ? project.shell
, devShells ? { default = devShell; }
, checkedProject ? project.appendModule { checkMaterialization = true; }
, ciJobs ? mkFlakeCiJobs project { inherit checks coverage packages devShells checkedProject; }
, hydraJobs ? ciJobs
}: {
inherit
# Used by:
# `nix build .#pkg-name:lib:pkg-name`
# `nix build .#pkg-name:lib:sublib-name`
# `nix build .#pkg-name:exe:exe-name`
# `nix build .#pkg-name:test:test-name`
packages
# Used by:
# `nix flake check`
checks
# `nix run .#pkg-name:exe:exe-name`
# `nix run .#pkg-name:test:test-name`
apps
# Used by hydra.
hydraJobs
# Like `hydraJobs` but with `${system}` first so that the IFDs will not have
# to run for systems we are not testing (placement of `${system}` is done
# by `flake-utils.eachSystem` and it treats `hydraJobs` differently from
# the other flake attributes).
# See https://github.com/numtide/flake-utils/blob/04c1b180862888302ddfb2e3ad9eaa63afc60cf8/default.nix#L131-L134
ciJobs
# Used by:
# `nix develop`
devShells
devShell; # TODO remove devShell once everyone has nix that supports `devShells.default`
};
# Adapt a standard project shell (`project.shell` or `haskell-nix.shellFor`)
# into a devshell module (https://github.com/numtide/devshell)
# that should provide the same environment.
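# For example (a minimal sketch; assumes numtide devshell is available as `devshell`):
#
# shell = devshell.mkShell { imports = [ (devshellFor project.shell) ]; };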
devshellFor = shell: {
packages = lib.filter lib.isDerivation (shell.nativeBuildInputs
# devshell does not use pkgs.mkShell / pkgs.stdenv.mkDerivation,
# so we need to explicitly list required dependencies which
# are provided implicitly by stdenv when using the normal shell:
++ shell.stdenv.defaultNativeBuildInputs)
++ [shell.stdenv.cc.bintools];
# We need to expose all the necessary env variables:
env = [
{
name = "PKG_CONFIG_PATH";
value = lib.makeSearchPath "lib/pkgconfig" shell.buildInputs;
}
] ++ lib.mapAttrsToList lib.nameValuePair ({
inherit (shell) NIX_GHC_LIBDIR;
# CABAL_CONFIG is only set if the shell was built with exactDeps=true
} // lib.optionalAttrs (shell ? CABAL_CONFIG) {
inherit (shell) CABAL_CONFIG;
});
};
makeCompilerDeps = import ./make-compiler-deps.nix {
inherit (pkgs.buildPackages.buildPackages) lib runCommand;
};
# Here we try to figure out which qemu to use based on the host platform.
# This guess can be overridden by passing qemuSuffix
qemuByHostPlatform = hostPlatform:
# I'd prefer this to be a dictionary lookup with a fallthrough to abort;
# that would make it more readable. I think there is a similar
# mapping somewhere in haskell.nix.
if hostPlatform.isAarch32
then "arm"
else if hostPlatform.isAarch64
then "aarch64"
else abort "Don't know which QEMU to use for hostPlatform ${hostPlatform.config}. Please provide qemuSuffix";
# How to run ldd when checking for static linking
lddForTests = "${pkgs.pkgsBuildBuild.glibc.bin}/bin/ldd";
# Version of `lib.unique` that should be fast if the name attributes are unique
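# For example (note that grouping by name means the result order can
# differ from the input order):
#
# uniqueWithName [ { name = "a"; } { name = "b"; } { name = "a"; } ]
#   == [ { name = "a"; } { name = "b"; } ]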
uniqueWithName = list:
lib.concatMap lib.unique (
builtins.attrValues (
builtins.groupBy (x: if __typeOf x == "set" then x.name or "noname" else "notset") list));
# Assert that each item in the list is unique
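# For example ("myList" is an illustrative message):
#
# checkUnique "myList" [ { name = "a"; } { name = "a"; } ]
#
# throws an error that includes the duplicated name "a".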
checkUnique = msg: x:
if __length x == __length (uniqueWithName x)
then x
else builtins.throw "Duplicate items found in ${msg} ${
__toJSON (__attrNames (lib.filterAttrs (_: v: __length v > 1) (
builtins.groupBy (x: if __typeOf x == "set" then x.name or "noname" else "notset") x)))
}";
types = import ./types.nix { inherit lib; };
addPackageKeys = x: x // { package-keys = builtins.attrNames x.packages; };
}