Compare commits

..

1 Commit

Author SHA1 Message Date
9fb17624b9 sketch out a module that can add files to a homedir 2024-06-12 16:43:56 -04:00
33 changed files with 1302 additions and 3400 deletions

69
.cabal/config Normal file

@ -0,0 +1,69 @@
-- This is the configuration file for the 'cabal' command line tool.
-- The available configuration options are listed below.
-- Some of them have default values listed.
-- Lines (like this one) beginning with '--' are comments.
-- Be careful with spaces and indentation because they are
-- used to indicate layout for nested sections.
remote-repo: hackage.haskell.org:http://hackage.haskell.org/packages/archive
remote-repo-cache: /Users/jwall/.cabal/packages
-- local-repo:
-- verbose: 1
-- compiler: ghc
-- with-compiler:
-- with-hc-pkg:
-- scratchdir:
-- program-prefix:
-- program-suffix:
-- library-vanilla: True
-- library-profiling: True
-- shared: False
-- executable-profiling: True
-- optimization: True
-- library-for-ghci: True
-- split-objs: False
-- executable-stripping: True
-- user-install: True
-- package-db:
-- flags:
-- extra-include-dirs:
-- extra-lib-dirs:
-- constraint:
-- cabal-lib-version:
-- preference:
-- documentation: False
-- doc-index-file: $datadir/doc/index.html
-- root-cmd:
-- symlink-bindir:
build-summary: /Users/jwall/.cabal/logs/build.log
-- build-log:
remote-build-reporting: anonymous
-- username:
-- password:
install-dirs user
-- prefix: /Users/jwall/.cabal
-- bindir: $prefix/bin
-- libdir: $prefix/lib
-- libsubdir: $pkgid/$compiler
-- libexecdir: $prefix/libexec
-- datadir: $prefix/share
-- datasubdir: $pkgid
-- docdir: $datadir/doc/$pkgid
-- htmldir: $docdir/html
-- haddockdir: $htmldir
install-dirs global
-- prefix: /usr/local
-- bindir: $prefix/bin
-- libdir: $prefix/lib
-- libsubdir: $pkgid/$compiler
-- libexecdir: $prefix/libexec
-- datadir: $prefix/share
-- datasubdir: $pkgid
-- docdir: $datadir/doc/$pkgid
-- htmldir: $docdir/html
-- haddockdir: $htmldir

1
.hammerspoon/.mjolnir Symbolic link

@ -0,0 +1 @@
C:/Users/jwall/.mjolnir

133
.hammerspoon/init.lua Normal file

@ -0,0 +1,133 @@
local mash = {"cmd", "alt", "ctrl"}
hs.hotkey.bind(mash, "R", function() mjolnir.reload() end)
-- Make the window fullscreen
hs.hotkey.bind(mash, "F", function()
local win = hs.window.focusedWindow()
if win then win:maximize() end
end)
-- Move the window to the right 5 pixels
hs.hotkey.bind(mash, "L", function()
local win = hs.window.focusedWindow()
local screen = win:screen()
local scrfrm = screen:frame()
if win then
f = win:frame()
local nx = f.x + 5
if (f.w + nx) <= scrfrm.w then
f.x = nx
win:setFrame(f)
else
f.x = scrfrm.w
end
end
end)
-- Move the window to the left 5 pixels
hs.hotkey.bind(mash, "H", function()
local win = hs.window.focusedWindow()
local screen = win:screen()
local scrfrm = screen:frame()
if win then
f = win:frame()
local nx = f.x - 5
if nx >= scrfrm.x then
f.x = nx
win:setFrame(f)
else
f.x = scrfrm.x
end
end
end)
-- Move the window down 5 pixels
hs.hotkey.bind(mash, "J", function()
local win = hs.window.focusedWindow()
local screen = win:screen()
local scrfrm = screen:frame()
if win then
f = win:frame()
local ny = f.y + 5
if (ny + f.h) <= scrfrm.h then
f.y = ny
win:setFrame(f)
else
f.y = scrfrm.h
end
end
end)
-- Move the window up 5 pixels
hs.hotkey.bind(mash, "K", function()
local win = hs.window.focusedWindow()
local screen = win:screen()
local scrfrm = screen:frame()
if win then
f = win:frame()
local ny = f.y - 5
if ny >= scrfrm.y then
f.y = ny
win:setFrame(f)
else
f.y = scrfrm.y
end
end
end)
-- Center the window
hs.hotkey.bind(mash, "C", function()
local win = hs.window.focusedWindow()
if win then
win:centerOnScreen()
end
end)
-- Move window all the way up
hs.hotkey.bind(mash, "up", function()
local win = hs.window.focusedWindow()
local screen = win:screen()
local scrfrm = screen:frame()
if win then
f = win:frame()
f.y = 0
win:setFrame(f)
end
end)
-- Move window all the way to the left
hs.hotkey.bind(mash, "left", function()
local win = hs.window.focusedWindow()
local screen = win:screen()
local scrfrm = screen:frame()
if win then
f = win:frame()
f.x = 0
win:setFrame(f)
end
end)
-- Move window all the way down
hs.hotkey.bind(mash, "down", function()
local win = hs.window.focusedWindow()
local screen = win:screen()
local scrfrm = screen:frame()
if win then
f = win:frame()
f.y = scrfrm.h - f.h
win:setFrame(f)
end
end)
-- Move window all the way to the right
hs.hotkey.bind(mash, "right", function()
local win = hs.window.focusedWindow()
local screen = win:screen()
local scrfrm = screen:frame()
if win then
f = win:frame()
f.x = scrfrm.w - f.w
win:setFrame(f)
end
end)


@ -1,11 +1,5 @@
{
"diagnostics.disable": [
"missing-fields"
],
"diagnostics.globals": [
"vim",
"make_avante_system_prompt",
"update_avante_system_prompt",
"get_server_list_prompt"
]
}

12
.screenrc Normal file

@ -0,0 +1,12 @@
startup_message off
defscrollback 5000
termcapinfo xterm ti@:te@
termcapinfo xterm-color ti@:te@
hardstatus alwayslastline
hardstatus string '%{= kG}[ %{G}%H %{g}][%= %{= kw}%?%-Lw%?%{r}(%{W}%n*%f%t%?(%u)%?%{r})%{w}%?%+Lw%?%?%= %{g}][%{B} %m/%d/%Y %{W}%c %{g}]'
vbell off
shell /bin/bash
logtstamp on
logtstamp after 1
logfile flush
term screen-256color

0
.xmonad/xmonad.errors Normal file

BIN
.xmonad/xmonad.hi Normal file

Binary file not shown.

47
.xmonad/xmonad.hs Normal file

@ -0,0 +1,47 @@
import Control.Monad
import XMonad
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.ManageDocks
import XMonad.Layout
import XMonad.Layout.Column
import XMonad.Layout.LayoutBuilder
import XMonad.Config.Desktop
import XMonad.Hooks.SetWMName
import qualified XMonad.StackSet as W
import XMonad.Util.EZConfig
import XMonad.Util.Run(spawnPipe)
import System.IO
mkLayout n = layoutN n mainBox (Just overFlowBox) layout overflowLayout
where
mainHeight = 0.75 -- Main window height
mainWidth = 0.68 -- Main window width
row = Mirror $ Column 1 -- a row of windows
layout = Tall 1 0.03 mainWidth -- Top row layout
overflowLayout = layoutAll (relBox 0 mainHeight 1 1) row -- bottom row layout
mainBox = relBox 0 0 1 mainHeight -- Main box
overFlowBox = relBox 0 0 1 1 -- Bottom row
myLayout = ((mkLayout 2) ||| (mkLayout 1)) ||| Full
myGmRunCommand = "gmrun 'rxvt'"
main = do
xmproc <- spawnPipe "xmobar"
xmonad $ desktopConfig {
terminal = "/usr/bin/rxvt"
, focusFollowsMouse = False
, layoutHook = avoidStruts myLayout
, logHook = dynamicLogWithPP $ xmobarPP
{ ppLayout = xmobarColor "grey" "black" . (\ x -> pad "")
, ppOutput = hPutStrLn xmproc
}
, workspaces = ["code", "communication", "random", "configuration"]
} `additionalKeys` [
((mod1Mask, xK_w), kill)
, ((controlMask .|. mod1Mask, xK_Delete), spawn "gnome-screensaver-command --lock")
, ((mod1Mask .|. shiftMask, xK_p), spawn myGmRunCommand) -- %! Launch gmrun
, ((mod1Mask , xK_p), spawn myGmRunCommand) -- %! Launch gmrun
]

122
.yi/yi.hs Normal file

@ -0,0 +1,122 @@
-- |
-- Author : jeremy@marzhillstudios.com
-- Parts of this config were stolen from/inspired by
-- yi/yi-contrib/src/Yi/Config/Michal.hs.
import Yi
import qualified Yi.Keymap.Vim as Vim
import qualified Yi.Keymap.Vim.Common as Vim
import qualified Yi.Keymap.Vim.Utils as Vim
import Yi.Keymap.Keys
import Yi.Modes
import Yi.Style
import Yi.Style.Library
import Yi.Config.Default (availableFrontends)
import Yi.Config.Misc
import Data.Monoid
-- TODO(jwall): :<num> navigation
-- TODO(jwall): handle file updates: checktime functionality
-- TODO(jwall): Visual Mode is broken?
-- default color specifications
fgColor = brightwhite
bgColor = black
bgSelectedColor = lightGrey
builtinColor = magenta
commentColor = darkcyan
typeColor = darkgreen
keywordColor = yellow
quoteColor = magenta
-- extended vim keymap
myKeymap :: KeymapSet
myKeymap = Vim.mkKeymapSet $ Vim.defVimConfig `override` \super self ->
let eval = Vim.pureEval self
in super {
-- put our custom bindings first so they override default bindings
Vim.vimBindings = myBindings eval <> Vim.vimBindings super
}
-- custom keybindings
myBindings :: (Vim.EventString -> EditorM ()) -> [Vim.VimBinding]
myBindings eval =
let nmap x y = Vim.mkStringBindingE Vim.Normal Vim.Drop (x, y, id)
imap x y = Vim.VimBindingE (\evs state -> case Vim.vsMode state of
Vim.Insert _ ->
fmap (const (y >> return Vim.Continue))
(evs `Vim.matchesString` x)
_ -> Vim.NoMatch)
nmap' x y = Vim.mkStringBindingY Vim.Normal (x, y, id)
in [
-- custom bindings here
]
-- GUI enhanced colors
guiBgColor = RGB 20 20 20
defaultVimTermTheme :: Theme
defaultVimTermTheme = defaultTheme `override` \super self -> super {
modelineAttributes = emptyAttributes {foreground = white,
background = grey},
modelineFocusStyle = withBg grey `mappend` withFg fgColor,
baseAttributes = emptyAttributes { foreground = fgColor,
background = bgColor},
builtinStyle = withFg builtinColor,
commentStyle = withFg commentColor,
typeStyle = withFg typeColor,
importStyle = withFg keywordColor,
selectedStyle = withBg bgSelectedColor,
stringStyle = withFg quoteColor,
keywordStyle = withFg green
}
defaultVimGuiTheme :: Theme
defaultVimGuiTheme = defaultVimTermTheme `override` \super self -> super {
baseAttributes = emptyAttributes { foreground = fgColor,
background = guiBgColor}
}
-- global indent preferences
prefIndent :: Mode s -> Mode s
prefIndent m = m {
modeIndentSettings = IndentSettings
{
expandTabs = True
, shiftWidth = 2
, tabSize = 2
}
}
myDefaultUI = configUI defaultVimConfig
myConfigDefaultUI :: UIConfig
myConfigDefaultUI = myDefaultUI {
configFontSize = Just 9
, configTheme = defaultVimGuiTheme
, configWindowFill = '~'
}
myConfigTermUI :: UIConfig -- reuse the above defaults here
myConfigTermUI = myConfigDefaultUI {
configTheme = defaultVimTermTheme
}
myDefaultConfig = defaultVimConfig {
configUI = myConfigDefaultUI
, defaultKm = myKeymap
, modeTable = fmap (onMode prefIndent) (modeTable defaultVimConfig)
}
myConfig =
case availableFrontends of
(("vty", f):_) -> myDefaultConfig {
configUI = myConfigTermUI
}
((_, f):_) -> myDefaultConfig
main :: IO ()
main = yi myConfig


@ -1,5 +1,2 @@
%-darwin:
sudo darwin-rebuild --flake ./nix/base-system $*
update-input-%:
nix flake update $* ./nix/base-system/
darwin-rebuild --flake ./nix/base-system $*

283
docker-compose.yaml Normal file

@ -0,0 +1,283 @@
---
version: "3.5"
networks:
penpot:
volumes:
penpot_postgres_v15:
penpot_assets:
# penpot_traefik:
# penpot_minio:
services:
## Traefik service declaration example. Consider using it if you are going to expose
## penpot to the internet or a different host than `localhost`.
# traefik:
# image: traefik:v2.9
# networks:
# - penpot
# command:
# - "--api.insecure=true"
# - "--entryPoints.web.address=:80"
# - "--providers.docker=true"
# - "--providers.docker.exposedbydefault=false"
# - "--entryPoints.websecure.address=:443"
# - "--certificatesresolvers.letsencrypt.acme.tlschallenge=true"
# - "--certificatesresolvers.letsencrypt.acme.email=<EMAIL_ADDRESS>"
# - "--certificatesresolvers.letsencrypt.acme.storage=/traefik/acme.json"
# volumes:
# - "penpot_traefik:/traefik"
# - "/var/run/docker.sock:/var/run/docker.sock"
# ports:
# - "80:80"
# - "443:443"
penpot-frontend:
image: "penpotapp/frontend:latest"
ports:
- 9001:80
volumes:
- penpot_assets:/opt/data/assets
depends_on:
- penpot-backend
- penpot-exporter
networks:
- penpot
labels:
- "traefik.enable=true"
## HTTP: example of labels for the case where you are going to expose penpot to the
## internet using only HTTP (without HTTPS) with traefik
# - "traefik.http.routers.penpot-http.entrypoints=web"
# - "traefik.http.routers.penpot-http.rule=Host(`<DOMAIN_NAME>`)"
# - "traefik.http.services.penpot-http.loadbalancer.server.port=80"
## HTTPS: example of labels for the case where you are going to expose penpot to the
## internet using HTTPS with traefik
# - "traefik.http.middlewares.http-redirect.redirectscheme.scheme=https"
# - "traefik.http.middlewares.http-redirect.redirectscheme.permanent=true"
# - "traefik.http.routers.penpot-http.entrypoints=web"
# - "traefik.http.routers.penpot-http.rule=Host(`<DOMAIN_NAME>`)"
# - "traefik.http.routers.penpot-http.middlewares=http-redirect"
# - "traefik.http.routers.penpot-https.entrypoints=websecure"
# - "traefik.http.routers.penpot-https.rule=Host(`<DOMAIN_NAME>`)"
# - "traefik.http.services.penpot-https.loadbalancer.server.port=80"
# - "traefik.http.routers.penpot-https.tls=true"
# - "traefik.http.routers.penpot-https.tls.certresolver=letsencrypt"
## Configuration environment variables for the frontend container. In this case this
## container only needs the `PENPOT_FLAGS`. This environment variable is shared with
## other services but not all flags are relevant to all services.
environment:
## Relevant flags for frontend:
## - demo-users
## - login-with-github
## - login-with-gitlab
## - login-with-google
## - login-with-ldap
## - login-with-oidc
## - login-with-password
## - registration
## - webhooks
##
## You can read more about all available flags on:
## https://help.penpot.app/technical-guide/configuration/#advanced-configuration
- PENPOT_FLAGS=enable-registration enable-login-with-password
penpot-backend:
image: "penpotapp/backend:latest"
volumes:
- penpot_assets:/opt/penpot/assets
depends_on:
- penpot-postgres
- penpot-redis
networks:
- penpot
## Configuration environment variables for the backend
## container.
environment:
## Relevant flags for backend:
## - demo-users
## - email-verification
## - log-emails
## - log-invitation-tokens
## - login-with-github
## - login-with-gitlab
## - login-with-google
## - login-with-ldap
## - login-with-oidc
## - login-with-password
## - registration
## - secure-session-cookies
## - smtp
## - smtp-debug
## - telemetry
## - webhooks
## - prepl-server
##
## You can read more about all available flags and other
## environment variables for the backend here:
## https://help.penpot.app/technical-guide/configuration/#advanced-configuration
- PENPOT_FLAGS=enable-registration enable-login-with-password disable-email-verification enable-smtp enable-prepl-server
## Penpot SECRET KEY. It serves as a master key from which other keys for subsystems
## (eg http sessions) are derived.
##
## Leave it commented if it is ok for you to have to log in again after each backend
## restart.
##
## If you are going to uncomment this, we recommend using a truly randomly generated
## 512-bit base64 encoded string. You can generate one with:
##
## python3 -c "import secrets; print(secrets.token_urlsafe(64))"
# - PENPOT_SECRET_KEY=my-insecure-key
## The PREPL host. Mainly used for external programmatic access to the penpot backend
## (example: admin). By default it listens on `localhost`, but if you are going to use
## the `admin`, you will need to uncomment this and set the host to `0.0.0.0`.
# - PENPOT_PREPL_HOST=0.0.0.0
## Public URI. If you are going to expose this instance to the internet and use it
## under a different domain than 'localhost', you will need to adjust it to the final
## domain.
##
## Consider using traefik and set the 'disable-secure-session-cookies' if you are
## not going to serve penpot under HTTPS.
- PENPOT_PUBLIC_URI=http://localhost:9001
## Database connection parameters. Don't touch them unless you are using custom
## postgresql connection parameters.
- PENPOT_DATABASE_URI=postgresql://penpot-postgres/penpot
- PENPOT_DATABASE_USERNAME=penpot
- PENPOT_DATABASE_PASSWORD=penpot
## Redis is used for the websockets notifications. Don't touch unless the redis
## container has different parameters or different name.
- PENPOT_REDIS_URI=redis://penpot-redis/0
## Default configuration for assets storage: using filesystem based with all files
## stored in a docker volume.
- PENPOT_ASSETS_STORAGE_BACKEND=assets-fs
- PENPOT_STORAGE_ASSETS_FS_DIRECTORY=/opt/data/assets
## Can also be configured to use an S3-compatible storage
## service like MinIO. Look below for the minio service setup.
# - AWS_ACCESS_KEY_ID=<KEY_ID>
# - AWS_SECRET_ACCESS_KEY=<ACCESS_KEY>
# - PENPOT_ASSETS_STORAGE_BACKEND=assets-s3
# - PENPOT_STORAGE_ASSETS_S3_ENDPOINT=http://penpot-minio:9000
# - PENPOT_STORAGE_ASSETS_S3_BUCKET=<BUCKET_NAME>
## Telemetry. When enabled, a periodic process will send anonymous data about this
## instance. Telemetry data will enable us to learn how the application is used,
## based on real scenarios. If you want to help us, please leave it enabled. You can
## audit what data we send with the code available on github
- PENPOT_TELEMETRY_ENABLED=true
## Example SMTP/Email configuration. By default, emails are sent to the mailcatch
## service, but for production usage it is recommended to set up a real SMTP
## provider. Emails are used to confirm user registrations & invitations. Look below
## at how the mailcatch service is configured.
- PENPOT_SMTP_DEFAULT_FROM=no-reply@example.com
- PENPOT_SMTP_DEFAULT_REPLY_TO=no-reply@example.com
- PENPOT_SMTP_HOST=penpot-mailcatch
- PENPOT_SMTP_PORT=1025
- PENPOT_SMTP_USERNAME=
- PENPOT_SMTP_PASSWORD=
- PENPOT_SMTP_TLS=false
- PENPOT_SMTP_SSL=false
penpot-exporter:
image: "penpotapp/exporter:latest"
networks:
- penpot
environment:
# Don't touch it; this uses internal docker network to
# communicate with the frontend.
- PENPOT_PUBLIC_URI=http://penpot-frontend
## Redis is used for the websockets notifications.
- PENPOT_REDIS_URI=redis://penpot-redis/0
penpot-postgres:
image: "postgres:15"
restart: always
stop_signal: SIGINT
volumes:
- penpot_postgres_v15:/var/lib/postgresql/data
networks:
- penpot
environment:
- POSTGRES_INITDB_ARGS=--data-checksums
- POSTGRES_DB=penpot
- POSTGRES_USER=penpot
- POSTGRES_PASSWORD=penpot
penpot-redis:
image: redis:7
restart: always
networks:
- penpot
## A mailcatch service, used as a temporary SMTP server. You can access port 1080 over
## HTTP to read all the emails the penpot platform has sent. It should only be used as a
## temporary solution while you don't have a real SMTP provider configured.
penpot-mailcatch:
image: sj26/mailcatcher:latest
restart: always
expose:
- '1025'
ports:
- "1080:1080"
networks:
- penpot
## Example configuration of MinIO (an S3-compatible object storage service). If you don't
## have a preference, just use the filesystem; this is here just for completeness.
# minio:
# image: "minio/minio:latest"
# command: minio server /mnt/data --console-address ":9001"
#
# volumes:
# - "penpot_minio:/mnt/data"
#
# environment:
# - MINIO_ROOT_USER=minioadmin
# - MINIO_ROOT_PASSWORD=minioadmin
#
# ports:
# - 9000:9000
# - 9001:9001

19
nix/base-system/.zshrc Normal file

@ -0,0 +1,19 @@
set -o vi
export PROMPT='%F{green}[%T] %F{cyan}(%n@%m) %F{lightgrey}(%y) %F{cyan} %~
%F{white}%(!.>>.$>) '
# Always use C-R for history search backward
bindkey '^R' history-incremental-search-backward
# Opam configuration
[[ ! -r ~/.opam/opam-init/init.zsh ]] || source ~/.opam/opam-init/init.zsh > /dev/null 2> /dev/null
# dotnet stuff
if [ -f "$HOME/Library/Application Support/dnvm/env" ]; then
. "$HOME/Library/Application Support/dnvm/env"
fi
function service_restart() {
launchctl stop $1
launchctl start $1
}

54
nix/base-system/d2.nix Normal file

@ -0,0 +1,54 @@
{ lib
, buildGoModule
, fetchFromGitHub
, installShellFiles
, git
, testers
, d2
}:
buildGoModule rec {
pname = "d2";
version = "0.6.3";
src = fetchFromGitHub {
owner = "terrastruct";
repo = pname;
rev = "v${version}";
hash = "sha256-GImv4OJHanj6dKtAJpTaGLrR4AaVTboiYHwRdh/gXaU";
};
vendorHash = "sha256-T7eki06fQuGvYIJKvBJsIkFS1fQ9Jbv+ieUSr2vupqg=";
excludedPackages = [ "./e2etests" ];
ldflags = [
"-s"
"-w"
"-X oss.terrastruct.com/d2/lib/version.Version=v${version}"
];
nativeBuildInputs = [ installShellFiles ];
postInstall = ''
installManPage ci/release/template/man/d2.1
'';
nativeCheckInputs = [ git ];
preCheck = ''
# See https://github.com/terrastruct/d2/blob/master/docs/CONTRIBUTING.md#running-tests.
export TESTDATA_ACCEPT=1
'';
passthru.tests.version = testers.testVersion {
package = d2;
version = "v${version}";
};
meta = with lib; {
description = "A modern diagram scripting language that turns text to diagrams";
homepage = "https://d2lang.com";
license = licenses.mpl20;
};
}
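For orientation, a minimal consumption sketch (not part of this commit, and only illustrative): a local derivation like the one above is normally pulled in with callPackage, which the flake.nix diff further down also does via `d2-lang = pkgs.callPackage ./d2.nix {};` (its systemPackages entry is commented out there because d2 is noted as broken in the pinned nixpkgs).

{ pkgs, ... }:
let
  # callPackage fills the derivation's arguments (lib, buildGoModule,
  # fetchFromGitHub, installShellFiles, git, testers, d2) from pkgs.
  d2-lang = pkgs.callPackage ./d2.nix { };
in
{
  environment.systemPackages = [ d2-lang ];
}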


@ -1,18 +1,13 @@
{ pkgs, config, lib, ... }:
let
vfkit = pkgs.callPackage ../packages/vfkit.nix {};
in
{
nix = {
package = pkgs.nix;
enable = true;
# SEE: https://github.com/NixOS/nix/issues/4119#issuecomment-1734738812
settings.sandbox = "relaxed";
extraOptions = ''
experimental-features = nix-command flakes
experimental-features = nix-command flakes repl-flake
extra-platforms = x86_64-darwin aarch64-darwin x86_64-linux
trusted-users = root zaphar
'';
};
@ -33,23 +28,18 @@ in
];
# TODO(zaphar): Move this to a module.
#launchd.user.agents.ipfs = {
# serviceConfig = {
# ProgramArguments = [
# "${pkgs.kubo}/bin/ipfs"
# "daemon"
# "--init"
# ];
# KeepAlive = true;
# RunAtLoad = true;
# };
#};
services.ollama = {
enable = true;
user="zaphar";
launchd.user.agents.ipfs = {
serviceConfig = {
ProgramArguments = [
"${pkgs.kubo}/bin/ipfs"
"daemon"
"--init"
];
KeepAlive = true;
RunAtLoad = true;
};
};
services.my-lorri.enable = true;
services.durnitisp.enable = true;
services.node-exporter.enable = true;
services.prometheus.enable = true;
@ -154,16 +144,6 @@ in
yaxis = "y1";
};
}
{
source = "http://${config.services.prometheus.listen}";
query = ''
delta(ping_counter{result="dropped"}[5m])
'';
config = {
name_format = "`icmp \${labels.domain} drop count`";
yaxis = "y1";
};
}
];
}
{
@ -209,7 +189,6 @@ in
data_dir = "/var/lib/vector";
api = {
enabled = true;
address = "127.0.0.1:8686";
};
sources = {
prometheus = {
@ -248,14 +227,8 @@ in
type = "file";
include = [
"/var/log/system.log"
"/var/log/com.apple.xpc.launchd/launchd.log"
];
};
syslog_source = {
type = "exec";
command = ["/usr/bin/log" "stream" "--style" "ndjson"];
mode = "streaming";
};
};
transforms = {
durnitisp_no_tty = {
@ -267,56 +240,9 @@ in
.message = strip_ansi_escape_codes(.message) ?? .message
'';
};
syslog = {
type = "remap";
inputs = [
"syslog_source"
];
source = ''
.message = parse_json(.message) ?? .message
del(.command)
.syslog
.eventType = .message.eventType
.processImagePath = .message.processImagePath
'';
};
};
sinks = {
victoria_vector = {
type = "elasticsearch";
mode = "bulk";
endpoints = [
"http://${config.services.victoria-logs.listenAddr}/insert/elasticsearch"
];
inputs = [
"vector"
];
api_version = "v8";
healthcheck.enabled = false;
query = {
_msg_field = "message";
_time_field = "timestamp";
_stream_fields = "host,source_type";
};
};
victoria_syslog = {
type = "elasticsearch";
mode = "bulk";
endpoints = [
"http://${config.services.victoria-logs.listenAddr}/insert/elasticsearch"
];
inputs = [
"syslog"
];
api_version = "v8";
healthcheck.enabled = false;
query = {
_msg_field = "message";
_time_field = "timestamp";
_stream_fields = "host,processImagePath,eventType";
};
};
victoria_files = {
victoria = {
type = "elasticsearch";
mode = "bulk";
endpoints = [
@ -326,6 +252,7 @@ in
"prometheus"
#"heracles"
"durnitisp_no_tty"
"vector"
"victoria-logs"
"system"
];
@ -340,13 +267,13 @@ in
};
};
# TODO launchd.user.agents.prometheus;
# Use a custom configuration.nix location.
# $ darwin-rebuild switch -I darwin-config=$HOME/.config/nixpkgs/darwin/configuration.nix
# environment.darwinConfig = "$HOME/.config/nixpkgs/darwin/configuration.nix";
environment.systemPackages = [
# Required on darwin for podman machine to work.
vfkit
];
# Auto upgrade nix package and the daemon service.
services.nix-daemon.enable = true;
#services.spacebar.enable = true;
#services.spacebar.package = "${pkgs.spacebar}";
@ -363,7 +290,6 @@ in
(allow file-read* file-write* process-exec mach-lookup (subpath "${builtins.storeDir}"))
'';
};
system.primaryUser = "zaphar";
# Used for backwards compatibility, please read the changelog before changing.
# $ darwin-rebuild changelog
system.stateVersion = 4;

File diff suppressed because it is too large


@ -1,19 +1,18 @@
{
inputs = {
# Default to sane nixpkgs versions
nixpkgs.url = "github:nixos/nixpkgs/release-25.05";
nixpkgs.url = "github:nixos/nixpkgs/24.05";
unstable.url = "nixpkgs";
nixpkgs-darwin.url = "github:nixos/nixpkgs/nixpkgs-25.05-darwin";
nixpkgs-darwin.url = "github:nixos/nixpkgs/nixpkgs-24.05-darwin";
#lean4-flake = {
# url = "github:leanprover/lean4/v4.4.0";
# inputs.nixpkgs.follows = "nixpkgs";
#};
darwin = {
url = "github:lnl7/nix-darwin/nix-darwin-25.05";
url = "github:lnl7/nix-darwin";
# ensure that darwinSystem uses our nixpkgs version
inputs.nixpkgs.follows = "nixpkgs-darwin";
};
sheetsui-flake.url = "github:zaphar/sheetsui";
sile-flake.url = "github:sile-typesetter/sile";
durnitisp-flake.url = "github:zaphar/durnitisp";
runwhen-flake.url = "github:zaphar/runwhen";
@ -31,15 +30,15 @@
inputs.nixpkgs.follows = "nixpkgs";
};
#neovim-flake = {
# url = "github:neovim/neovim/stable?dir=contrib";
# # NOTE(jeremy): Currently this needs a newer nixpkgs version
# #inputs.nixpkgs.follows = "nixpkgs";
#};
jujutsu-flake.url = "github:martinvonz/jj";
custom-flakes = {
url = "github:zaphar/nix-flakes";
inputs.nixpkgs.follows = "nixpkgs";
neovim-flake = {
url = "github:neovim/neovim/stable?dir=contrib";
# NOTE(jeremy): Currently this needs a newer nixpkgs version
#inputs.nixpkgs.follows = "nixpkgs";
};
harpoon-src = {
url = "github:ThePrimeagen/harpoon/harpoon2";
flake = false;
};
neogit-src = {
url = "github:NeogitOrg/neogit";
@ -49,61 +48,77 @@
url = "github:terrastruct/d2-vim";
flake = false;
};
# We need to pin to this version of treesitter because it breaks after this revision
treesitter-context = {
url = "github:nvim-treesitter/nvim-treesitter-context/e6b743ccd4e780bc9cd85b707de67df72eea1a23";
flake = false;
};
roslyn-lsp = {
url = "github:zaphar/roslyn.nvim/main";
flake = false;
};
heracles-flake.url = "github:zaphar/Heracles";
mcphub-flake.url = "github:ravitemer/mcphub.nvim/v5.0.1";
};
outputs = {
darwin,
sile-flake,
jujutsu-flake,
runwhen-flake,
durnitisp-flake,
rust-overlay-flake,
agenix-flake,
nil-flake,
nurl-flake,
custom-flakes,
harpoon-src,
neogit-src,
d2-vim-src,
treesitter-context,
roslyn-lsp,
#lean4-flake,
unstable,
heracles-flake,
neovim-flake,
clio-flake,
sheetsui-flake,
mcphub-flake,
... # We don't use the self or nixpkgs args here so we just glob it.
}:
rec {
vimModule = system: let
nil-pkg = nil-flake.packages."${system}".default;
mcp-hub-binary = custom-flakes.packages."${system}".mcp-hub;
claude-code-binary = custom-flakes.packages."${system}".claude-code;
in
{ config, pkgs, ...}: let
unstablePkgs = import unstable { inherit system; };
neogit-nvim = custom-flakes.packages."${system}".neogit-nvim;
d2-vim = custom-flakes.packages."${system}".d2-vim;
hunk-nvim = custom-flakes.packages."${system}".hunk-nvim;
# TODO(zaphar): Apparently this is a remote plugin so it needs some additional love.
#nvim-bnf = custom-flakes.packages."${system}".nvim-bnf;
roslyn-nvim = custom-flakes.packages."${system}".roslyn-nvim;
ionide-nvim = custom-flakes.packages."${system}".ionide-nvim;
claude-code-nvim = custom-flakes.packages."${system}".claude-code-nvim;
# TODO(zaphar): Until nixpkgs update to a newer version of tree-sitter the queries will be wrong
# for csharp
#tree-sitter-csharp = pkgs.callPackage ./nvim-treesitter-csharp.nix {
# inherit (pkgs.neovimUtils) grammarToPlugin;
# inherit (pkgs.tree-sitter) buildGrammar;
# inherit (pkgs) fetchFromGitHub;
#};
possession-nvim = pkgs.callPackage ./possession-nvim.nix {
inherit (pkgs.vimUtils) buildVimPlugin;
inherit (pkgs) fetchFromGitHub;
};
harpoon-nvim = pkgs.vimUtils.buildVimPlugin {
pname = "harpoon";
version = "2024-01-28";
src = harpoon-src;
};
neogit-nvim = pkgs.vimUtils.buildVimPlugin {
pname = "neogit";
version = "2024-05-16";
src = neogit-src;
};
d2-vim = pkgs.vimUtils.buildVimPlugin {
pname = "d2-nvim";
version = "2024-01-28";
src = d2-vim-src;
};
nvim-treesitter-context = pkgs.vimUtils.buildVimPlugin {
name = "nvim-treesitter-context";
src = treesitter-context;
};
roslyn-nvim = pkgs.vimUtils.buildVimPlugin {
name = "roslyn-nvim";
src = roslyn-lsp;
};
nvim = neovim-flake.packages."${system}".neovim;
# TODO(jwall): When this actually builds we should use it.
#nvim-treesitter-powershell = pkgs.callPackage ./nvim-powershell.nix {
# inherit (pkgs.tree-sitter) buildGrammar;
# inherit (pkgs) fetchFromGitHub;
#};
mcphub-nvim = mcphub-flake.packages."${system}".default;
#nvim-treesitter-powershell = pkgs.callPackage ./nvim-powershell.nix {
# inherit (pkgs.tree-sitter) buildGrammar;
# inherit (pkgs) fetchFromGitHub;
#};
in {
imports = [
./program-neovim.nix
@ -117,117 +132,94 @@
nixpkgs.overlays = [
(final: prev: {
lorri = unstablePkgs.lorri;
custom-neovim = nvim;
})
];
programs = with unstablePkgs; {
programs = with pkgs; {
neovim.enable = true;
neovim.vimAlias = true;
neovim.viAlias = true;
neovim.package = unstablePkgs.neovim-unwrapped;
neovim.package = pkgs.custom-neovim;
neovim.configure = {
customRC = "lua << EOF
customRC = "lua << EOF
${builtins.readFile ./init.lua}
EOF";
packages.myVimPackage = {
start = (with pkgs.vimPlugins; [
copilot-lua
avante-nvim
vim-sile
nvim-tree-lua
nvim-lspconfig
packer-nvim
vim-ps1
vim-lsp
vim-vsnip
nvim-cmp
cmp-nvim-lua
cmp-nvim-lsp
cmp-vsnip
cmp-buffer
cmp-path
cmp-nvim-lsp-signature-help
nvim-dap # Debug Adapter Protocol support
#nvim-dap-ui
hoon-vim
nvim-treesitter
lualine-nvim
lualine-lsp-progress
lean-nvim
roslyn-nvim
ionide-nvim # Fsharp lsp support
nvim-treesitter-context
nvim-treesitter-textobjects
nvim-treesitter-parsers.ini
nvim-treesitter-parsers.ebnf
nvim-treesitter-parsers.rust
nvim-treesitter-parsers.c
#nvim-treesitter-parsers.c_sharp # currently broken for some reason
nvim-treesitter-parsers.cpp
nvim-treesitter-parsers.clojure
nvim-treesitter-parsers.latex
nvim-treesitter-parsers.terraform
nvim-treesitter-parsers.hcl
nvim-treesitter-parsers.yaml
nvim-treesitter-parsers.lua
nvim-treesitter-parsers.vim
nvim-treesitter-parsers.go
nvim-treesitter-parsers.toml
nvim-treesitter-parsers.tlaplus
nvim-treesitter-parsers.typescript
nvim-treesitter-parsers.starlark
nvim-treesitter-parsers.python
nvim-treesitter-parsers.sql
nvim-treesitter-parsers.javascript
nvim-treesitter-parsers.ocaml
nvim-treesitter-parsers.haskell
nvim-treesitter-parsers.css
nvim-treesitter-parsers.promql
nvim-treesitter-parsers.nix
nvim-treesitter-parsers.zig
nvim-treesitter-parsers.hoon
#omnisharp-extended-lsp-nvim
#neotest-dotnet
nix-develop-nvim
trouble-nvim
nightfox-nvim
melange-nvim
telescope-nvim
telescope-lsp-handlers-nvim
plenary-nvim
vim-dadbod
vim-dadbod-ui
vim-dadbod-completion
vim-dasht
direnv-vim
mcphub-nvim
claude-code-nvim
]) ++ [
d2-vim
hunk-nvim
neogit-nvim
# tree-sitter-csharp.neovim-plugin # Until nixpkgs updates their nvim-treesitter config the csharp queries will be broken
];
};
packages.myVimPackage = {
start = (with pkgs.vimPlugins; [
nvim-tree-lua
nvim-lspconfig
packer-nvim
vim-ps1
vim-lsp
vim-vsnip
nvim-cmp
cmp-nvim-lua
cmp-nvim-lsp
cmp-vsnip
cmp-buffer
cmp-path
cmp-nvim-lsp-signature-help
nvim-dap # Debug Adapter Protocol support
#nvim-dap-ui
hoon-vim
nvim-treesitter
lean-nvim
roslyn-nvim
nvim-treesitter-context
nvim-treesitter-textobjects
nvim-treesitter-parsers.ini
nvim-treesitter-parsers.rust
nvim-treesitter-parsers.c
nvim-treesitter-parsers.c_sharp
nvim-treesitter-parsers.cpp
nvim-treesitter-parsers.clojure
nvim-treesitter-parsers.latex
nvim-treesitter-parsers.terraform
nvim-treesitter-parsers.hcl
nvim-treesitter-parsers.yaml
nvim-treesitter-parsers.lua
nvim-treesitter-parsers.vim
nvim-treesitter-parsers.go
nvim-treesitter-parsers.toml
nvim-treesitter-parsers.tlaplus
nvim-treesitter-parsers.typescript
nvim-treesitter-parsers.starlark
nvim-treesitter-parsers.python
nvim-treesitter-parsers.sql
nvim-treesitter-parsers.javascript
nvim-treesitter-parsers.ocaml
nvim-treesitter-parsers.haskell
nvim-treesitter-parsers.css
nvim-treesitter-parsers.promql
nvim-treesitter-parsers.nix
nvim-treesitter-parsers.hoon
omnisharp-extended-lsp-nvim
#neotest-dotnet
nix-develop-nvim
trouble-nvim
nightfox-nvim
melange-nvim
telescope-nvim
telescope-lsp-handlers-nvim
plenary-nvim
vim-dadbod
vim-dadbod-ui
vim-dadbod-completion
]) ++ [ possession-nvim harpoon-nvim d2-vim neogit-nvim ];
};
};
};
environment.systemPackages = (with pkgs; [
terraform-ls
tinymist
nodePackages.typescript-language-server
nodePackages.jsdoc
# TODO find a version of the julia package that will install
# on darwin and add it as an overlay
nil-pkg
lua-language-server
lua-language-server
rust-analyzer
dasht
direnv
lorri
devenv
unstablePkgs.fq
mcp-hub-binary
claude-code-binary
# TODO(jwall): This is no longer provided for darwin in nixpkgs
#dbeaver-bin
]);
};
};
@ -236,14 +228,14 @@ EOF";
durnitisp = durnitisp-flake.defaultPackage."${system}";
rust-overlay = rust-overlay-flake.overlays.default;
sile = sile-flake.defaultPackage.${system};
jujutsu = jujutsu-flake.packages.${system}.jujutsu;
age = agenix-flake.packages."${system}".default;
nurl = nurl-flake.packages."${system}".default;
clio = clio-flake.packages."${system}".default;
unstablePkgs = import unstable { inherit system; };
sheetsui = sheetsui-flake.packages."${system}".default;
#lean4Pkg = lean4-flake.defaultPackage."${system}";
in { config, pkgs, ... }: {
in { config, pkgs, ... }: let
d2-lang = pkgs.callPackage ./d2.nix {};
in {
imports = [
agenix-flake.nixosModules.default
@ -260,16 +252,6 @@ EOF";
durnitisp = durnitisp;
clio = clio;
victoria-logs = pkgs.callPackage ../packages/victoria-logs.nix { };
podman = unstablePkgs.podman;
podman-compose = unstablePkgs.podman-compose;
sc-im = unstablePkgs.sc-im;
gnumeric = prev.gnumeric.overrideAttrs(oldAttrs: {
meta.broken = false;
});
uv = unstablePkgs.uv;
quint = (pkgs.callPackage ../packages/quint/default.nix {})."@informalsystems/quint";
quint-lsp = (pkgs.callPackage ../packages/quint/default.nix {})."@informalsystems/quint-language-server";
ollama = unstablePkgs.ollama;
})
rust-overlay
];
@ -286,7 +268,7 @@ EOF";
enable = true; # default shell on catalina
# This is a total hack but we don't want the default clobbering it which it does if it's an empty string or null
promptInit = "#noop";
interactiveShellInit = (builtins.readFile ./zshrc);
interactiveShellInit = (builtins.readFile ./.zshrc);
};
tmux = {
@ -296,17 +278,22 @@ EOF";
};
};
environment.systemPackages = (with pkgs; [
sheetsui
# TODO(jwall): This appears to be broken due to: https://github.com/NixOS/nixpkgs/issues/166205
# Should be fixed by: https://github.com/NixOS/nixpkgs/pull/282624
#isabelle
# Should be fixed by: https://github.com/NixOS/nixpkgs/pull/282624
#(lean4.override { stdenv = stdenv.override { allowedRequisites = null; cc = llvmPackages_11.clang; }; })
lean4
quint
quint-lsp
terraform # TODO(jeremy): Replace with opentofu when that is an option.
nomad
oha
nodejs
gnumake
# TODO(zaphar): find a version of the julia package that will install
# TODO find a version of the julia package that will install
# on darwin and add it as an overlay
uv
python310
python310Packages.pip
python310Packages.virtualenv
emacs
git
mercurial
@ -321,8 +308,8 @@ EOF";
htop
colima
ghidra
podman
podman-compose
#podman-desktop # Broken on darwin right now with electron build issues.
# Note that podman expects qemu to be installed in order to use the podman machine setup.
qemu
lima
rlwrap
@ -332,7 +319,8 @@ EOF";
alloy6
tlaplus
jdk
d2
# TODO(zaphar): d2 is broken in latest nixpkgs
#d2-lang
plantuml-c4
nssTools
nomad
@ -343,33 +331,23 @@ EOF";
# NOTE(jwall): I include these initially for nvim telescope
ripgrep
fd
sc-im
gnumeric
wezterm
wezterm.terminfo
#ocaml
#opam
unstablePkgs.vector
vector
victoriametrics
# TODO add sonic-pi here if it supports the arch
unstablePkgs.dbeaver-bin
postgresql
unstablePkgs.ollama
])
#++ (with pkgs.ocamlPackages; [
# dune_3
# odoc
# ocaml-lsp
# merlin
# utop
#])
++ (with pkgs.ocamlPackages; [
dune_3
odoc
ocaml-lsp
merlin
utop
])
++ [
sile
runwhen
durnitisp
age
nurl
jujutsu
];
};
};
@ -382,8 +360,6 @@ EOF";
./modules/darwin-monitor.nix
./modules/victoria-logs.nix
./modules/vector.nix
./modules/lorri.nix
./modules/ollama.nix
./darwin-configuration.nix
];
};


@ -16,14 +16,6 @@ vim.opt.mouse = ""
vim.opt.tabstop = 4
vim.opt.shiftwidth = 4
--vim.opt.smarttab = false
vim.opt.fileformats = "unix,dos"
-- Recommended by Avante docs
-- views can only be fully collapsed with the global statusline
vim.opt.laststatus = 3
-- formatexpr defaulted to the lsp provider by default recently
-- which breaks `gq` and company paragraph formatting in non lsp
-- contexts.
vim.opt.formatexpr = ""
vim.g.BASH_AuthorName = 'Jeremy Wall'
vim.g.BASH_AuthorRef = 'jw'
@ -36,34 +28,6 @@ vim.cmd("noswapfile")
vim.cmd("syntax on")
vim.cmd("filetype plugin on")
vim.api.nvim_create_autocmd({ "BufEnter", "BufWinEnter" }, {
pattern = { "*.qnt" },
callback = function(args)
vim.lsp.start({
name = 'quint',
cmd = { 'quint-language-server', '--stdio' },
root_dir = vim.fs.dirname(vim.uri_from_bufnr(args.buf))
})
end,
})
vim.api.nvim_create_autocmd({ "BufNewfile", "BufRead" }, {
callback = function(args)
-- If treesitter supports this filetype then use the treesitter fold expression
local ft = vim.bo.filetype
if ft and ft ~= "" then
-- Safely check if a parser exists for this filetype
local has_parser = pcall(function() return vim.treesitter.language.inspect(ft) end)
if has_parser then
vim.wo.foldexpr = 'v:lua.vim.treesitter.foldexpr()'
vim.wo.foldmethod = 'expr'
vim.wo.foldlevel = 10
end
end
end,
})
vim.cmd([[
au BufNewFile,BufRead *Makefile,*.mk set noexpandtab
]])
@ -88,6 +52,10 @@ vim.cmd([[
au BufNewFile,BufRead .bash_* set filetype=sh
]])
vim.cmd([[
au BufNewFile,BufRead *.sil set filetype=tex
]])
vim.cmd([[
au BufNewFile,BufRead *.erl filetype indent off
]])
@ -96,19 +64,6 @@ vim.cmd([[
au BufNewFile,BufRead *.hrl filetype indent off
]])
vim.cmd([[
au BufNewFile,BufRead *.nix set tabstop=2 nosmarttab
]])
vim.cmd([[
au BufNewFile,BufRead *.ebnf set filetype=ebnf
]])
-- Telescope Imports
local telescope = require('telescope')
local telescope_builtins = require('telescope.builtin')
local telescope_actions = require('telescope.actions')
--
--https://github.com/neovim/nvim-lspconfig/blob/master/doc/server_configurations.md
@ -168,30 +123,21 @@ local caps = vim.tbl_deep_extend(
local lspconfig = require("lspconfig")
local configure_lsp = function(name, config)
vim.lsp.enable(name)
vim.lsp.config(name, config)
end
-- Typst
configure_lsp('tinymist', {
capabilities = caps,
settings = {
exportPdf = "onSave",
},
})
-- Terraform lsp setup
configure_lsp('terraformls', {})
lspconfig.terraformls.setup {}
-- Nix language server support
configure_lsp('nil_ls', {
lspconfig.nil_ls.setup {
--single_file_support = true,
--on_attach = function(client, bufnr)
-- -- disable the semanticTokens because it has issues.
-- -- client.server_capabilities.semanticTokensProvider = nil
--end,
capabilities = caps,
})
}
require('roslyn').setup({
-- client, bufnr
on_attach = function(_, _)
on_attach = function (client, _)
--vim.notify(vim.inspect(client))
end,
sdk_framework = "net8.0",
@ -199,14 +145,55 @@ require('roslyn').setup({
log_level = "Trace",
});
-- Typescript language server support
configure_lsp('tsserver', {
cmd = { 'typescript-language-server', '--stdio' },
local vim_pid = vim.fn.getpid()
-- "FormatterOptions:EnableEditorConfigSupport=true"
--local omnisharp_cmd = { 'omnisharp', '--languageserver', '-v', '--hostPID', tostring(vim_pid), }
local function toSnakeCase(str)
return string.gsub(str, "%s*[- ]%s*", "_")
end
--lspconfig.omnisharp.setup {
-- cmd = omnisharp_cmd,
-- enable_roslyn_analyzers = true,
-- enable_editorconfig_support = true,
-- enable_import_completion = true,
-- -- Omnisharp has issues with the semanticTokens feature we need to massage it a bit.
-- on_attach = function(client, bufnr)
-- -- https://github.com/OmniSharp/omnisharp-roslyn/issues/2483#issuecomment-1492605642
-- local tokenModifiers = client.server_capabilities.semanticTokensProvider.legend.tokenModifiers
-- for i, v in ipairs(tokenModifiers) do
-- tokenModifiers[i] = toSnakeCase(v)
-- end
-- local tokenTypes = client.server_capabilities.semanticTokensProvider.legend.tokenTypes
-- for i, v in ipairs(tokenTypes) do
-- tokenTypes[i] = toSnakeCase(v)
-- end
-- end,
-- handlers = {
-- ["textDocument/definition"] = require('omnisharp_extended').handler,
-- },
-- capabilities = caps,
--}
--ocaml
lspconfig.ocamllsp.setup {
capabilities = caps
})
}
-- Java language server support
lspconfig.java_language_server.setup {
capabilities = caps
}
-- Typescript language server support
lspconfig.tsserver.setup {
capabilities = caps
}
-- Rust language server support
configure_lsp('rust_analyzer', {
lspconfig.rust_analyzer.setup {
settings = {
-- https://github.com/rust-lang/rust-analyzer/blob/master/docs/user/generated_config.adoc
['rust-analyzer'] = {
@ -214,11 +201,10 @@ configure_lsp('rust_analyzer', {
}
},
capabilities = caps
})
}
-- lua language server setup.
configure_lsp('lua_ls', {
cmd = { 'lua-language-server' },
lspconfig.lua_ls.setup {
settings = {
Lua = {
runtime = { version = 'LuaJIT', },
@ -238,13 +224,7 @@ configure_lsp('lua_ls', {
},
},
capabilities = caps
})
configure_lsp('ty', {
cmd = { 'ty', 'server' },
filetypes = { 'python' },
root_markers = { 'ty.toml', 'pyproject.toml', '.git' },
})
}
-- lsp configuration
vim.api.nvim_create_autocmd('LspAttach', {
@ -256,22 +236,16 @@ vim.api.nvim_create_autocmd('LspAttach', {
vim.keymap.set({ "n", "v" }, "<Leader>a", vim.lsp.buf.code_action, opts)
vim.keymap.set("n", "<Leader>f", vim.lsp.buf.format, opts)
local client = vim.lsp.get_client_by_id(args.data.client_id)
---@diagnostic disable-next-line: undefined-field
if client and client.server_capabilities.codelens then
if client.server_capabilities.codelens then
vim.lsp.codelens.refresh()
end
-- formatexpr defaulted to the lsp provider by default recently
-- which breaks `gq` and company paragraph formatting in non lsp
-- contexts.
vim.bo[args.buf].formatexpr = ""
end,
})
vim.api.nvim_create_autocmd({ 'BufEnter', 'InsertLeave', 'CursorHold' }, {
callback = function(args)
local clients = vim.lsp.get_clients({ bufnr = args.buf })
callback = function(_)
local clients = vim.lsp.get_active_clients()
for cid = 1, #clients do
---@diagnostic disable-next-line: undefined-field
if clients[cid].server_capabilities.codelens then
vim.lsp.codelens.refresh()
break
@ -281,25 +255,31 @@ vim.api.nvim_create_autocmd({ 'BufEnter', 'InsertLeave', 'CursorHold' }, {
})
-- LSP Diagnostics Options Setup
local sign = function(opts)
vim.fn.sign_define(opts.name, {
texthl = opts.name,
text = opts.text,
numhl = ''
})
end
sign({ name = 'DiagnosticSignError', text = '🔥' })
sign({ name = 'DiagnosticSignWarn', text = '⚠️' })
sign({ name = 'DiagnosticSignHint', text = '➡️' })
sign({ name = 'DiagnosticSignInfo', text = '🗒️' })
vim.diagnostic.config({
virtual_text = false,
signs = true,
update_in_insert = true,
underline = true,
severity_sort = false,
float = {
border = 'rounded',
source = true,
source = 'always',
header = '',
prefix = '',
},
signs = {
text = {
[vim.diagnostic.severity.ERROR] = '🔥',
[vim.diagnostic.severity.WARN] = '⚠️',
[vim.diagnostic.severity.HINT] = '➡️',
[vim.diagnostic.severity.INFO] = '🗒️',
},
}
})
vim.cmd([[
@ -316,7 +296,7 @@ autocmd CursorHold * lua vim.diagnostic.open_float(nil, { focusable = false })
-- updatetime: set updatetime for CursorHold
vim.opt.completeopt = { 'menuone', 'noselect', 'noinsert' }
vim.opt.shortmess = vim.opt.shortmess + { c = true }
vim.api.nvim_set_option_value('updatetime', 300, { scope = "global" })
vim.api.nvim_set_option('updatetime', 300)
vim.opt.sessionoptions = { 'buffers', 'curdir', 'skiprtp', 'localoptions', 'terminal', 'tabpages' }
-- Fixed column for diagnostics to appear
-- Show autodiagnostic popup on cursor hover_range
@ -340,55 +320,31 @@ require('nvim-treesitter.configs').setup {
extended_mode = true,
max_file_lines = nil,
},
textobjects = {
move = {
enable = true,
set_jumps = true, -- whether to set jumps in the jumplist
goto_next_start = {
["]m"] = "@function.outer",
["]]"] = { query = "@class.outer", desc = "Next class start" },
--
-- You can use regex matching (i.e. lua pattern) and/or pass a list in a "query" key to group multiple queries.
["]o"] = "@loop.*",
-- ["]o"] = { query = { "@loop.inner", "@loop.outer" } }
--
-- You can pass a query group to use query from `queries/<lang>/<query_group>.scm file in your runtime path.
-- Below example nvim-treesitter's `locals.scm` and `folds.scm`. They also provide highlights.scm and indent.scm.
["]s"] = { query = "@local.scope", query_group = "locals", desc = "Next scope" },
["]z"] = { query = "@fold", query_group = "folds", desc = "Next fold" },
},
goto_next_end = {
["]M"] = "@function.outer",
["]["] = "@class.outer",
},
goto_previous_start = {
["[m"] = "@function.outer",
["[["] = "@class.outer",
},
goto_previous_end = {
["[M"] = "@function.outer",
["[]"] = "@class.outer",
},
-- Below will go to either the start or the end, whichever is closer.
-- Use if you want more granular movements
-- Make it even more gradual by adding multiple queries and regex.
goto_next = {
["]d"] = "@conditional.outer",
},
goto_previous = {
["[d"] = "@conditional.outer",
}
},
},
incremental_selection = {
enable = true,
keymaps = {
init_selection = '<Leader>c',
node_incremental = '<Leader>c',
scope_incremental = '<Leader>ci',
node_decremental = '<Leader>cx',
},
},
--textobjects = {
-- enable = true,
-- select = {
-- enable = true,
-- lookahead = true, -- Automatically jump forward to textobj, similar to targets.vim
-- keymaps = {
-- -- You can use the capture groups defined in textobjects.scm
-- ['aa'] = '@parameter.outer',
-- ['ia'] = '@parameter.inner',
-- ['af'] = '@function.outer',
-- ['if'] = '@function.inner',
-- ['ac'] = '@class.outer',
-- ['ic'] = '@class.inner',
-- },
-- },
--},
--incremental_selection = {
-- enable = true,
-- keymaps = {
-- init_selection = '<Leader>c',
-- node_incremental = '<Leader>c',
-- scope_incremental = '<Leader>ci',
-- node_decremental = '<Leader>cx',
-- },
--},
}
require 'treesitter-context'.setup {
@ -500,7 +456,7 @@ end)
-- Neogit integration
-- See https://github.com/NeogitOrg/neogit for configuration information.
local neogit = require('neogit')
neogit.setup {}
neogit.setup()
vim.keymap.set("n", "<Leader>mg", function()
neogit.open()
@ -511,6 +467,38 @@ vim.keymap.set("n", "<Leader>ga", function()
vim.cmd("!git add %")
end)
require('possession').setup {
commands = {
save = 'SSave',
load = 'SLoad',
delete = 'SDelete',
list = 'SList',
},
autosave = {
current = true,
on_load = true,
on_quit = true,
},
telescope = {
list = {
default_action = 'load',
mappings = {
save = { n = '<c-x>', i = '<c-x>' },
load = { n = '<c-v>', i = '<c-v>' },
delete = { n = '<c-t>', i = '<c-t>' },
rename = { n = '<c-r>', i = '<c-r>' },
},
},
},
}
-- Telescope configuration
local telescope = require('telescope')
local telescope_builtins = require('telescope.builtin')
local telescope_actions = require('telescope.actions')
local telescope_state = require('telescope.actions.state')
telescope.load_extension('possession')
-- https://github.com/nvim-telescope/telescope.nvim
telescope.setup({
defaults = {
@ -532,24 +520,27 @@ telescope.setup({
},
})
local harpoon = require('harpoon')
harpoon:setup()
local lean = require 'lean'
lean.setup {
lsp = {
-- client, bufnr
on_attach = function(_, bufnr)
on_attach = function(client, bufnr)
local opts = { buffer = bufnr }
vim.keymap.set({ "n", "v" }, "<Leader>ti", function() vim.cmd("LeanInfoviewToggle") end, opts)
vim.keymap.set({ "n", "v" }, "<Leader>sg", function() vim.cmd("LeanGoal") end, opts)
vim.keymap.set({ "n", "v" }, "<Leader>stg", function() vim.cmd("LeanTermGoal") end, opts)
vim.api.nvim_set_option_value('omnifunc', 'v:lua.vim.lsp.omnifunc', { scope = "local", buf = bufnr })
vim.api.nvim_buf_set_option(bufnr, 'omnifunc', 'v:lua.vim.lsp.omnifunc')
end
},
mappings = true,
}
-- telescope keymaps
vim.keymap.set("n", "<Leader>pl", telescope.extensions.possession.list)
-- TODO(zaphar): Remove this once my muscle memory has set in.
vim.keymap.set("n", "<Leader>nff", telescope_builtins.fd)
vim.keymap.set("n", "<Leader>ff", telescope_builtins.fd)
@ -558,13 +549,15 @@ vim.keymap.set("n", "<Leader>rn", vim.lsp.buf.rename)
vim.keymap.set("n", "<Leader>sl", telescope_builtins.lsp_workspace_symbols)
vim.keymap.set("n", "<Leader>dl", telescope_builtins.diagnostics)
vim.keymap.set("n", "<Leader>rg", telescope_builtins.live_grep)
vim.keymap.set("n", "<Leader>bl", function()
telescope_builtins.buffers({
})
end)
vim.keymap.set("n", "<leader>lds", telescope_builtins.lsp_document_symbols, { desc = "[D]ocument [S]ymbols" })
vim.keymap.set("n", "<leader>lws", telescope_builtins.lsp_dynamic_workspace_symbols, { desc = "[W]orkspace [S]ymbols" })
vim.keymap.set("n", "<Leader>bl", function() telescope_builtins.buffers({
}) end)
-- harpoon keymaps
vim.keymap.set("n", "<Leader>ha", function() harpoon:list():append() end)
vim.keymap.set("n", "<Leader>he", function() harpoon.ui:toggle_quick_menu(harpoon:list()) end)
vim.keymap.set("n", "<Leader>hj", function() harpoon:list():prev() end)
vim.keymap.set("n", "<Leader>hk", function() harpoon:list():next() end)
-- codelens keymaps
vim.keymap.set("n", "<Leader>rr", vim.lsp.codelens.run)
@ -574,77 +567,6 @@ vim.keymap.set("n", "<Leader>tdb", function()
vim.cmd("DBUIToggle")
end)
require('lualine').setup {
icons_enabled = false,
disabled_filetypes = {
statusline = {},
winbar = {},
},
sections = {
-- left side
lualine_a = { 'mode' },
lualine_b = { 'filename' },
lualine_c = { 'encoding', 'fileformat', 'filetype' },
-- right side
lualine_x = { 'diagnostics' },
lualine_y = { 'progress', 'lsp_progress' },
lualine_z = { 'location' }
}
}
-- Hunk diff tree viewer and editor. Replacement for Meld and company
local hunk = require("hunk")
hunk.setup({
keys = {
global = {
quit = { "q" },
accept = { "<leader><Cr>" },
focus_tree = { "<leader>e" },
},
tree = {
expand_node = { "l", "<Right>" },
collapse_node = { "h", "<Left>" },
open_file = { "<Cr>" },
toggle_file = { "a" },
},
diff = {
toggle_line = { "a" },
toggle_hunk = { "A" },
},
},
ui = {
tree = {
-- Mode can either be `nested` or `flat`
mode = "nested",
width = 35,
},
--- Can be either `vertical` or `horizontal`
layout = "vertical",
},
icons = {
selected = "󰡖",
deselected = "",
partially_selected = "󰛲",
folder_open = "",
folder_closed = "",
},
-- Called right after each window and buffer are created.
--hooks = {
-- ---@param _context { buf: number, tree: NuiTree, opts: table }
-- on_tree_mount = function(_context) end,
-- ---@param _context { buf: number, win: number }
-- on_diff_mount = function(_context) end,
--},
})
local dap = require('dap')
dap.adapters.lldb = {
type = "executable",
@ -682,128 +604,3 @@ dap.configurations.cs = {
end,
},
}
local mcphub = require("mcphub")
mcphub.setup({
-- This sets vim.g.mcphub_auto_approve to false by default (can also be toggled from the HUB UI with `ga`)
config = vim.fn.expand("~/.config/mcphub/servers.json"),
auto_approve = true,
auto_toggle_mcp_servers = true, -- Let LLMs start and stop MCP servers automatically
extensions = {
avante = {
make_slash_commands = true, -- make /slash commands from MCP server prompts
},
},
cmd = "mcp-hub",
})
function get_server_list_prompt(hub_instance)
-- returns a list of mcp-servers with a `name` and a list of tools with `name`
local mcp_tool_prompt = "# MCP SERVERS\n\nThe Model Context Protocol (MCP) enables communication between the system and locally running MCP servers that provide additional tools and resources to extend your capabilities.\n\n# Connected MCP Servers\n\nWhen a server is connected, you can use the server's tools via the `use_mcp_tool` tool, and access the server's resources via the `access_mcp_resource` tool.\nNote: Server names are case sensitive and you should always use the exact full name like `Firecrawl MCP` or `src/user/main/time-mcp` etc\n\n"
if not hub_instance then
return ""
end
local servers = hub_instance:get_servers()
if not servers or #servers == 0 then
return ""
end
for _, server in ipairs(servers) do
mcp_tool_prompt = mcp_tool_prompt .. "## server name: `" .. server.name .. "`\n\n"
if server.capabilities and server.capabilities.tools and #server.capabilities.tools > 0 then
mcp_tool_prompt = mcp_tool_prompt .. "Available tools:\n\n"
for _, tool in ipairs(server.capabilities.tools) do
mcp_tool_prompt = mcp_tool_prompt .. "- tool name: `" .. tool.name .. "`\n"
if tool.description then
mcp_tool_prompt = mcp_tool_prompt .. " - Description: " .. tool.description .. "\n"
end
end
mcp_tool_prompt = mcp_tool_prompt .. "\n"
end
end
return mcp_tool_prompt
end
function make_avante_system_prompt(hub_instance)
return hub_instance and get_server_list_prompt(hub_instance) or ""
end
function update_avante_system_prompt()
local hub_instance = mcphub.get_hub_instance();
local system_prompt = make_avante_system_prompt(hub_instance)
if system_prompt then
require("avante.config").override({system_prompt = system_prompt})
end
end
vim.keymap.set("n", "<Leader>ab", function() require('avante').get().file_selector:add_buffer_files() end)
vim.keymap.set("n", "<Leader>af", function() require('avante').get().file_selector:add_current_buffer() end)
get_root_dir = function()
-- First try to get the root path from LSP
local bufnr = vim.api.nvim_get_current_buf()
local clients = vim.lsp.get_clients({ bufnr = bufnr })
-- Check if we have an active LSP client with a root_dir
for _, client in ipairs(clients) do
if client.config and client.config.root_dir then
return client.config.root_dir
end
end
-- Fall back to file-based detection
local root_file = vim.fs.find(function(name, path)
return name:match('(pyproject.toml|.sln|Cargo.toml|.git)$')
end, { upward = true })[1]
return root_file and vim.fs.dirname(root_file) or vim.fn.getcwd()
end
require('copilot').setup({
root_dir = get_root_dir,
})
require('avante').setup({
provider = "claude",
mode = "planning",
cursor_applying_provider = nil, -- default to whatever provider is configured
claude = {
endpoint = "https://api.anthropic.com",
model = "claude-3-7-sonnet-20250219",
timeout = 30000, -- Timeout in milliseconds
temperature = 0,
max_tokens = 20480,
},
copilot = {
model = "claude-3.7-sonnet",
},
behavior = {
enable_cursor_planning_mode = true,
},
windows = {
ask = {
start_insert=false,
focus_on_apply="theirs",
},
},
system_prompt = make_avante_system_prompt(mcphub.get_hub_instance()),
custom_tools = { require("mcphub.extensions.avante").mcp_tool() },
-- Disable these because we'll use the mcphub versions instead
--disabled_tools = {
-- "list_files", -- Built-in file operations
-- "search_files",
-- "read_file",
-- "create_file",
-- "rename_file",
-- "delete_file",
-- "create_dir",
-- "rename_dir",
-- "delete_dir",
-- "bash", -- Built-in terminal access
--},
})


@ -1,24 +1,5 @@
{ pkgs, lib, config, ...}:
with lib;
let
mkLauncher = import ../../packages/darwin-launcher.nix { inherit pkgs; };
durnitispOutPath = config.services.durnitisp.stdoutPath;
durnitispErrPath = config.services.durnitisp.stderrPath;
durnitispPidPath = config.services.durnitisp.pidPath;
durnitispLauncher = mkLauncher ''
exec ${pkgs.clio}/bin/clio \
--out-path=${durnitispOutPath} \
--err-path=${durnitispErrPath} \
--pid-file=${durnitispPidPath} \
--paranoid \
-- \
${pkgs.durnitisp}/bin/durnitisp \
--listenHost=${config.services.durnitisp.listen} \
--stunHosts="stun.ekiga.net:3478,stun.schlund.de:3478,stun.voipbuster.com:3478,stun.voipstunt.com:3478,stun.xten.com:3478" \
--pingHosts="google.com,prod.actual.battle.net"
'';
in
{
options.services.node-exporter = {
@ -153,6 +134,10 @@ in
heraclesErrPath = config.services.heracles.stderrPath;
heraclesPidPath = config.services.heracles.pidPath;
durnitispOutPath = config.services.durnitisp.stdoutPath;
durnitispErrPath = config.services.durnitisp.stderrPath;
durnitispPidPath = config.services.durnitisp.pidPath;
prometheusOutPath = config.services.prometheus.stdoutPath;
prometheusErrPath = config.services.prometheus.stderrPath;
prometheusPidPath = config.services.prometheus.pidPath;
@ -201,10 +186,18 @@ in
launchd.daemons.durnitisp = mkIf config.services.durnitisp.enable {
serviceConfig = {
ProgramArguments = [
"${durnitispLauncher}"
"${pkgs.clio}/bin/clio"
"--out-path=${durnitispOutPath}"
"--err-path=${durnitispErrPath}"
"--pid-file=${durnitispPidPath}"
"--paranoid"
"--"
"${pkgs.durnitisp}/bin/durnitisp"
"--listenHost=${config.services.durnitisp.listen}"
];
#StandardErrorPath = "/var/log/clio.durnitisp.err";
#StandardOutPath = "/var/log/clio.durnitisp.out";
KeepAlive = true;
RunAtLoad = true;
};
};
@ -212,21 +205,16 @@ in
launchd.user.agents.prometheus = mkIf config.services.prometheus.enable {
serviceConfig = {
ProgramArguments = [
"/bin/sh"
"-c"
''
/bin/wait4path ${pkgs.clio}/bin/clio && \
exec ${pkgs.clio}/bin/clio \
--out-path=${prometheusOutPath} \
--err-path=${prometheusErrPath} \
--pid-file=${prometheusPidPath} \
--paranoid \
-- \
${pkgs.prometheus}/bin/prometheus \
--web.listen-address=${config.services.prometheus.listen} \
--config.file=/etc/${config.environment.etc."prometheus.yaml".target} \
--storage.tsdb.path=${config.services.prometheus.dataPath}/data \
''
"${pkgs.clio}/bin/clio"
"--out-path=${prometheusOutPath}"
"--err-path=${prometheusErrPath}"
"--pid-file=${prometheusPidPath}"
"--paranoid"
"--"
"${pkgs.prometheus}/bin/prometheus"
"--web.listen-address=${config.services.prometheus.listen}"
"--config.file=/etc/${config.environment.etc."prometheus.yaml".target}"
"--storage.tsdb.path=${config.services.prometheus.dataPath}/data"
];
WorkingDirectory = config.services.prometheus.dataPath;
WatchPaths = [

View File

@ -0,0 +1,24 @@
{ config, lib, pkgs, ...}:
with lib;
let
userFiles = mapAttrs (u: fs: filter (f: f.enable) (attrValues fs)) config.homeDir;
in
{
options = {
homeDir = mkOption {
default = {};
description = ''
Users with files to link into their home directory.
{ "user1" = {
".git/config" = {
enable = true;
};
};
};
'';
};
};
config = {
};
}
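The config block above is intentionally left empty in this sketch. One way it might eventually consume these definitions (purely illustrative, and assuming each file entry also gains a hypothetical "source" attribute naming the store path to link) is a post-activation script that symlinks every enabled file into the owning user's home directory, e.g. via nix-darwin's system.activationScripts.postActivation:

# Illustrative sketch only; "source" is an assumed per-file attribute the module
# does not define yet, and the lib helpers come from the module's `with lib;`.
config = {
  system.activationScripts.postActivation.text = concatStringsSep "\n" (
    flatten (mapAttrsToList (user: files:
      mapAttrsToList (path: f: ''
        mkdir -p "$(dirname "/Users/${user}/${path}")"
        ln -sfn "${f.source}" "/Users/${user}/${path}"
      '') (filterAttrs (_: f: f.enable) files)
    ) config.homeDir)
  );
};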

View File

@ -1,26 +0,0 @@
{pkgs, lib, config, ...}:
with lib;
let
mkLauncher = import ../../packages/darwin-launcher.nix { inherit pkgs; };
lorriLauncher = mkLauncher ''
source ${config.system.build.setEnvironment}
exec ${pkgs.lorri}/bin/lorri daemon
'';
in
{
options.services.my-lorri = {
enable = mkEnableOption "Enable the lorri agent";
};
config = {
launchd.user.agents.lorri = mkIf config.services.my-lorri.enable {
serviceConfig = {
ProgramArguments = [
"${lorriLauncher}"
];
RunAtLoad = true;
KeepAlive = true;
};
};
};
}

View File

@ -1,56 +0,0 @@
{pkgs, lib, config, ...}:
with lib;
let
mkLauncher = import ../../packages/darwin-launcher.nix { inherit pkgs; };
ollamaLauncher = mkLauncher ''
exec ${pkgs.clio}/bin/clio \
--out-path=${config.services.ollama.stdoutPath} \
--err-path=${config.services.ollama.stderrPath} \
--pid-file=${config.services.ollama.pidPath} \
--paranoid \
-- \
${pkgs.ollama}/bin/ollama \
serve
'';
in
{
options.services.ollama = {
enable = mkEnableOption "Enable the ollama agent";
stdoutPath = mkOption {
default = "/Users/${config.services.ollama.user}/config/ollama/out.log";
};
stderrPath = mkOption {
default = "/Users/${config.services.ollama.user}/config/ollama/err.log";
};
pidPath = mkOption {
default = "/Users/${config.services.ollama.user}/config/ollama/ollama.pid";
};
user = mkOption {
default="zaphar";
};
};
config = {
launchd.user.agents.ollama = mkIf config.services.ollama.enable {
serviceConfig = {
ProgramArguments = [
"${ollamaLauncher}"
];
EnvironmentVariables = {
"OLLAMA_HOST" = "127.0.0.1:11434";
"OLLAMA_MODELS" = "/Users/${config.services.ollama.user}/config/ollama/";
};
RunAtLoad = true;
};
};
environment.etc."newsyslog.d/org.nixos.ollama.conf" = mkIf config.services.ollama.enable {
text = ''
# logfilename [owner:group] mode count size when flags [/pid_file] [sig_num]
${config.services.ollama.stdoutPath} zaphar:staff 644 10 1000 * BJ ${config.services.ollama.pidPath} 1
${config.services.ollama.stderrPath} zaphar:staff 644 10 1000 * BJ ${config.services.ollama.pidPath} 1
'';
};
};
}

View File

@ -1,20 +1,5 @@
{pkgs, lib, config, ...}:
with lib;
let
mkLauncher = import ../../packages/darwin-launcher.nix { inherit pkgs; };
vectorLauncher = mkLauncher ''
exec ${pkgs.clio}/bin/clio \
--out-path=${config.services.vector.stdoutPath} \
--err-path=${config.services.vector.stderrPath} \
--pid-file=${config.services.vector.pidPath} \
--paranoid \
-- \
${pkgs.vector}/bin/vector \
--verbose \
--watch-config \
--config=/etc/${config.environment.etc."vector.yaml".target}
'';
in
{
options.services.vector = {
enable = mkEnableOption "Enable the vector agent";
@ -43,22 +28,18 @@ in
launchd.daemons.vector = mkIf config.services.vector.enable {
serviceConfig = {
Program = "${pkgs.vector}/bin/vector";
ProgramArguments = [
"${vectorLauncher}"
"--verbose"
"--watch-config"
"--config=/etc/${config.environment.etc."vector.yaml".target}"
];
WatchPaths= [
"/etc/${config.environment.etc."vector.yaml".target}"
];
KeepAlive = true;
RunAtLoad = true;
};
};
environment.etc."newsyslog.d/org.nixos.vector.conf" = mkIf config.services.vector.enable {
text = ''
# logfilename [owner:group] mode count size when flags [/pid_file] [sig_num]
${config.services.vector.stdoutPath} zaphar:staff 644 10 1000 * BJ ${config.services.vector.pidPath} 1
${config.services.vector.stderrPath} zaphar:staff 644 10 1000 * BJ ${config.services.vector.pidPath} 1
'';
};
};
}

View File

@ -8,14 +8,6 @@ with lib;
description = "Logging directory path for victoria-logs service";
default = "/Users/Zaphar/opt/victoria-logs";
};
maxStorageSize = mkOption {
description = "Maximum storage size on disk [KB, MB, GB]";
default = "200GiB";
};
retentionTime = mkOption {
description = "Timespan of logs to retain, Logs older than this will be dropped.";
default = "5d";
};
listenAddr = mkOption {
description = "Socket Address to listen on";
default = "127.0.0.1:9428";
@ -49,8 +41,6 @@ with lib;
"${pkgs.victoria-logs}/bin/victoria-logs"
"-logNewStreams"
"-storageDataPath=${config.services.victoria-logs.dataPath}/data"
"-retention.maxDiskSpaceUsageBytes=${config.services.victoria-logs.maxStorageSize}"
"-retentionPeriod=${config.services.victoria-logs.retentionTime}"
"-httpListenAddr=${config.services.victoria-logs.listenAddr}"
];
KeepAlive = true;

View File

@ -1,16 +1,11 @@
{ buildGrammar, fetchFromGitHub, grammarToPlugin, }:
let grammar = buildGrammar {
language = "c_sharp";
version = "0.23.1-master";
{ buildGrammar, fetchFromGitHub, pkgs, }:
buildGrammar {
language = "c-sharp";
version = "0.20.0-master";
src = fetchFromGitHub {
owner = "tree-sitter";
repo = "tree-sitter-c-sharp";
rev = "b27b091bfdc5f16d0ef76421ea5609c82a57dff0";
hash = "sha256-kSbMv6fKELB5CTSevD1umUgKfb3rsucEnAVYHFiAHss=";
};
};
in
{
inherit grammar;
neovim-plugin = grammarToPlugin grammar;
owner = "tree-sitter";
repo = "tree-sitter-c-sharp";
rev = "1648e21b4f087963abf0101ee5221bb413107b07";
hash = "sha256-WvkHtw8t14UNqiJvmS9dbGYQSVVzHS9mcWzxq+KLMnU=";
};
}

View File

@ -10,8 +10,7 @@ set-option -g prefix C-a
#set -g utf8 on
## we like colors
set -g default-terminal xterm-256color
set -ga terminal-overrides ",xterm-256color:RGB"
set -g default-terminal "screen-256color"
## Useful KeyBindings

View File

@ -1,54 +0,0 @@
set -o vi
export PROMPT='%F{green}[%T] %F{cyan}(%n@%m) %F{lightgrey}(%y) %F{cyan} %~
%F{white}%(!.>>.$>) '
# Always use C-R for history search backward
bindkey '^R' history-incremental-search-backward
# Opam configuration
[[ ! -r ~/.opam/opam-init/init.zsh ]] || source ~/.opam/opam-init/init.zsh > /dev/null 2> /dev/null
# dotnet stuff
if [ -f "$HOME/Library/Application Support/dnvm/env" ]; then
. "$HOME/Library/Application Support/dnvm/env"
fi
function service_restart() {
launchctl kickstart -k $1
}
function root_shell() {
sudo -E zsh -i /etc/zshrc
}
function wrkspc() {
local wd=${1:-$(pwd)}
cd ${wd}
local session=$(basename $wd)
tmux new -A -s $session
}
function kill_service_on_port() {
local port=$1
echo killing pids for port ${port}
for p in $(sudo lsof -nP -iTCP -sTCP:LISTEN | grep ${port} | awk '{ print $2 }'); do
echo killing pid: $p;
kill -9 $p;
done
}
function disk_usage() {
local path=${1:-.}
/run/current-system/sw/bin/du --max-depth=1 -h "${path}" 2>/dev/null
}
# TODO(zaphar): Figure out why this thing doesn't come up or dies
function restart_durnitisp() {
sudo launchctl bootout system/org.nixos.durnitisp
sudo launchctl bootstrap system /Library/LaunchDaemons/org.nixos.durnitisp.plist org.nixos.durnitisp
}
eval "$(direnv hook zsh)"
export ANTHROPIC_API_KEY="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"
export OPENAI_API_KEY="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"

View File

@ -1,6 +0,0 @@
{ pkgs }:
with pkgs;
contents: writeShellScript "launcher.sh" ''
/bin/wait4path /nix/store && \
${contents}
''

View File

@ -1,17 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs_18"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.cctools or pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

View File

@ -1,689 +0,0 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
else {
packageObj.devDependencies = {};
}
replaceDependencies(packageObj.optionalDependencies);
replaceDependencies(packageObj.peerDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "reconstructpackagelock.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 2,
requires: true,
packages: {
"": {
name: packageObj.name,
version: packageObj.version,
license: packageObj.license,
bin: packageObj.bin,
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
}
},
dependencies: {}
};
function augmentPackageJSON(filePath, packages, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
packages[filePath] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
};
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, packages, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, packages, dependencies);
}
});
} else {
augmentPackageJSON(filePath, packages, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
# Script that links bins defined in package.json to the node_modules bin directory
# NPM does not do this for top-level packages itself anymore as of v7
linkBinsScript = writeTextFile {
name = "linkbins.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
if(packageObj.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
if(typeof packageObj.bin == "object") {
Object.keys(packageObj.bin).forEach(function(exe) {
if(fs.existsSync(packageObj.bin[exe])) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin[exe]),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
else {
if(fs.existsSync(packageObj.bin)) {
console.log("linking bin '" + packageObj.bin + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin),
path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
);
}
else {
console.log("skipping non-existent bin '" + packageObj.bin + "'");
}
}
}
else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.directories.bin, exe),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
in
''
# Pinpoint the versions of all dependencies to the ones that are actually being used
echo "pinpointing versions of dependencies..."
source $pinpointDependenciesScriptPath
# Patch the shebangs of the bundled modules to prevent them from
# calling executables outside the Nix store as much as possible
patchShebangs .
# Deploy the Node.js package by running npm install. Since the
# dependencies have been provided already by ourselves, it should not
# attempt to install them again, which is good, because we want to make
# it Nix's responsibility. If it needs to install any dependencies
# anyway (e.g. because the dependency parameters are
# incomplete/incorrect), it fails.
#
# The other responsibilities of NPM are kept -- version checks, build
# steps, postprocessing etc.
export HOME=$TMPDIR
cd "${packageName}"
runHook preRebuild
${lib.optionalString bypassCache ''
${lib.optionalString reconstructLock ''
if [ -f package-lock.json ]
then
echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
rm package-lock.json
else
echo "No package-lock.json file found, reconstructing..."
fi
node ${reconstructPackageLock}
''}
node ${addIntegrityFieldsScript}
''}
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
runHook postRebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
fi
# Link executables defined in package.json
node ${linkBinsScript}
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, preRebuild ? ""
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, meta ? {}
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit nodejs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall preRebuild unpackPhase buildPhase;
compositionScript = composePackage args;
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
# Compose the package and all its dependencies
source $compositionScriptPath
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Create symlink to the deployed executable folder, if applicable
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Fixup all executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
if isScript "$file"
then
sed -i 's/\r$//' "$file" # convert crlf to lf
fi
done
fi
# Create symlinks to the deployed manual page folders, if applicable
if [ -d "$out/lib/node_modules/${packageName}/man" ]
then
mkdir -p $out/share
for dir in "$out/lib/node_modules/${packageName}/man/"*
do
mkdir -p $out/share/man/$(basename "$dir")
for page in "$dir"/*
do
ln -s $page $out/share/man/$(basename "$dir")
done
done
fi
# Run post install hook, if provided
runHook postInstall
'';
meta = {
# default to Node.js' platforms
platforms = nodejs.meta.platforms;
} // meta;
} // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
buildNodeDependencies =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
++ buildInputs;
inherit dontStrip; # Stripping may fail a build for some package deployments
inherit dontNpmInstall unpackPhase buildPhase;
includeScript = includeDependencies { inherit dependencies; };
pinpointDependenciesScript = pinpointDependenciesOfPackage args;
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
source $includeScriptPath
# Create fake package.json to make the npm commands work properly
cp ${src}/package.json .
chmod 644 package.json
${lib.optionalString bypassCache ''
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
# Go to the parent folder to make sure that all packages are pinpointed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
# Expose the executables that were installed
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
mv ${packageName} lib
ln -s $out/lib/node_modules/.bin $out/bin
'';
} // extraArgs);
# Builds a development shell
buildNodeShell =
{ name
, packageName
, version ? null
, src
, dependencies ? []
, buildInputs ? []
, production ? true
, npmFlags ? ""
, dontNpmInstall ? false
, bypassCache ? false
, reconstructLock ? false
, dontStrip ? true
, unpackPhase ? "true"
, buildPhase ? "true"
, ... }@args:
let
nodeDependencies = buildNodeDependencies args;
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
mkdir -p $out/bin
cat > $out/bin/shell <<EOF
#! ${stdenv.shell} -e
$shellHook
exec ${stdenv.shell}
EOF
chmod +x $out/bin/shell
'';
# Provide the dependencies in a development shell through the NODE_PATH environment variable
inherit nodeDependencies;
shellHook = lib.optionalString (dependencies != []) ''
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
} // extraArgs);
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
buildNodePackage = lib.makeOverridable buildNodePackage;
buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
buildNodeShell = lib.makeOverridable buildNodeShell;
}

View File

@ -1,4 +0,0 @@
[
"@informalsystems/quint",
"@informalsystems/quint-language-server"
]

View File

@ -1,970 +0,0 @@
# This file has been generated by node2nix 1.11.1. Do not edit!
{nodeEnv, fetchurl, fetchgit, nix-gitignore, stdenv, lib, globalBuildInputs ? []}:
let
sources = {
"@grpc/grpc-js-1.11.1" = {
name = "_at_grpc_slash_grpc-js";
packageName = "@grpc/grpc-js";
version = "1.11.1";
src = fetchurl {
url = "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.11.1.tgz";
sha512 = "gyt/WayZrVPH2w/UTLansS7F9Nwld472JxxaETamrM8HNlsa+jSLNyKAZmhxI2Me4c3mQHFiS1wWHDY1g1Kthw==";
};
};
"@grpc/proto-loader-0.7.13" = {
name = "_at_grpc_slash_proto-loader";
packageName = "@grpc/proto-loader";
version = "0.7.13";
src = fetchurl {
url = "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz";
sha512 = "AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==";
};
};
"@informalsystems/quint-0.20.0" = {
name = "_at_informalsystems_slash_quint";
packageName = "@informalsystems/quint";
version = "0.20.0";
src = fetchurl {
url = "https://registry.npmjs.org/@informalsystems/quint/-/quint-0.20.0.tgz";
sha512 = "q3jxvzVw0hw4yxOJcGzAMIguGfcyScs/BP5efqCxQeVMDxLsqy+SAgLH5CnM7UNJFXLawwuyRaCbIvq9+2BXRg==";
};
};
"@js-sdsl/ordered-map-4.4.2" = {
name = "_at_js-sdsl_slash_ordered-map";
packageName = "@js-sdsl/ordered-map";
version = "4.4.2";
src = fetchurl {
url = "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz";
sha512 = "iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==";
};
};
"@octokit/endpoint-9.0.5" = {
name = "_at_octokit_slash_endpoint";
packageName = "@octokit/endpoint";
version = "9.0.5";
src = fetchurl {
url = "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.5.tgz";
sha512 = "ekqR4/+PCLkEBF6qgj8WqJfvDq65RH85OAgrtnVp1mSxaXF03u2xW/hUdweGS5654IlC0wkNYC18Z50tSYTAFw==";
};
};
"@octokit/openapi-types-22.2.0" = {
name = "_at_octokit_slash_openapi-types";
packageName = "@octokit/openapi-types";
version = "22.2.0";
src = fetchurl {
url = "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz";
sha512 = "QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==";
};
};
"@octokit/request-8.4.0" = {
name = "_at_octokit_slash_request";
packageName = "@octokit/request";
version = "8.4.0";
src = fetchurl {
url = "https://registry.npmjs.org/@octokit/request/-/request-8.4.0.tgz";
sha512 = "9Bb014e+m2TgBeEJGEbdplMVWwPmL1FPtggHQRkV+WVsMggPtEkLKPlcVYm/o8xKLkpJ7B+6N8WfQMtDLX2Dpw==";
};
};
"@octokit/request-error-5.1.0" = {
name = "_at_octokit_slash_request-error";
packageName = "@octokit/request-error";
version = "5.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.0.tgz";
sha512 = "GETXfE05J0+7H2STzekpKObFe765O5dlAKUTLNGeH+x47z7JjXHfsHKo5z21D/o/IOZTUEI6nyWyR+bZVP/n5Q==";
};
};
"@octokit/types-13.5.0" = {
name = "_at_octokit_slash_types";
packageName = "@octokit/types";
version = "13.5.0";
src = fetchurl {
url = "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz";
sha512 = "HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==";
};
};
"@protobufjs/aspromise-1.1.2" = {
name = "_at_protobufjs_slash_aspromise";
packageName = "@protobufjs/aspromise";
version = "1.1.2";
src = fetchurl {
url = "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz";
sha512 = "j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==";
};
};
"@protobufjs/base64-1.1.2" = {
name = "_at_protobufjs_slash_base64";
packageName = "@protobufjs/base64";
version = "1.1.2";
src = fetchurl {
url = "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz";
sha512 = "AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==";
};
};
"@protobufjs/codegen-2.0.4" = {
name = "_at_protobufjs_slash_codegen";
packageName = "@protobufjs/codegen";
version = "2.0.4";
src = fetchurl {
url = "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz";
sha512 = "YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==";
};
};
"@protobufjs/eventemitter-1.1.0" = {
name = "_at_protobufjs_slash_eventemitter";
packageName = "@protobufjs/eventemitter";
version = "1.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz";
sha512 = "j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==";
};
};
"@protobufjs/fetch-1.1.0" = {
name = "_at_protobufjs_slash_fetch";
packageName = "@protobufjs/fetch";
version = "1.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz";
sha512 = "lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==";
};
};
"@protobufjs/float-1.0.2" = {
name = "_at_protobufjs_slash_float";
packageName = "@protobufjs/float";
version = "1.0.2";
src = fetchurl {
url = "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz";
sha512 = "Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==";
};
};
"@protobufjs/inquire-1.1.0" = {
name = "_at_protobufjs_slash_inquire";
packageName = "@protobufjs/inquire";
version = "1.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz";
sha512 = "kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==";
};
};
"@protobufjs/path-1.1.2" = {
name = "_at_protobufjs_slash_path";
packageName = "@protobufjs/path";
version = "1.1.2";
src = fetchurl {
url = "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz";
sha512 = "6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==";
};
};
"@protobufjs/pool-1.1.0" = {
name = "_at_protobufjs_slash_pool";
packageName = "@protobufjs/pool";
version = "1.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz";
sha512 = "0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==";
};
};
"@protobufjs/utf8-1.1.0" = {
name = "_at_protobufjs_slash_utf8";
packageName = "@protobufjs/utf8";
version = "1.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz";
sha512 = "Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==";
};
};
"@sweet-monads/either-3.2.0" = {
name = "_at_sweet-monads_slash_either";
packageName = "@sweet-monads/either";
version = "3.2.0";
src = fetchurl {
url = "https://registry.npmjs.org/@sweet-monads/either/-/either-3.2.0.tgz";
sha512 = "n+nR0b60GRTKb+D76qhTf4NEBXU9zfpigYYEtKtSYbV+5+i5gxr9jFd64pYkY2O7hVsb/G7nspbAeFni/i1ltA==";
};
};
"@sweet-monads/interfaces-3.3.0" = {
name = "_at_sweet-monads_slash_interfaces";
packageName = "@sweet-monads/interfaces";
version = "3.3.0";
src = fetchurl {
url = "https://registry.npmjs.org/@sweet-monads/interfaces/-/interfaces-3.3.0.tgz";
sha512 = "66akGvjPD4lizQy+w4JSltJilc2w/QPdw8lPAniLJGHwyjmrw9xMJLx76Q/GDnbCU59Werses4aZJLWOlJrL5A==";
};
};
"@sweet-monads/maybe-3.2.0" = {
name = "_at_sweet-monads_slash_maybe";
packageName = "@sweet-monads/maybe";
version = "3.2.0";
src = fetchurl {
url = "https://registry.npmjs.org/@sweet-monads/maybe/-/maybe-3.2.0.tgz";
sha512 = "/t+K0D/kBfkYOkZaePEsrK868at0M9UIEVgehcM0xscrCSZhKWGteE41vl2XJQqh8WyiFo/mZ5y7eAPSYzS+pg==";
};
};
"@types/line-column-1.0.2" = {
name = "_at_types_slash_line-column";
packageName = "@types/line-column";
version = "1.0.2";
src = fetchurl {
url = "https://registry.npmjs.org/@types/line-column/-/line-column-1.0.2.tgz";
sha512 = "099oFQmp/Tlf20xW5XI5R4F69N6lF/zQ09XDzc3R5BOLFlqIotgKoNIyj0HD4fQLWcGDreDJv8k/BkLJscrDrw==";
};
};
"@types/lodash-4.17.7" = {
name = "_at_types_slash_lodash";
packageName = "@types/lodash";
version = "4.17.7";
src = fetchurl {
url = "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.7.tgz";
sha512 = "8wTvZawATi/lsmNu10/j2hk1KEP0IvjubqPE3cu1Xz7xfXXt5oCq3SNUz4fMIP4XGF9Ky+Ue2tBA3hcS7LSBlA==";
};
};
"@types/lodash.clonedeep-4.5.0" = {
name = "_at_types_slash_lodash.clonedeep";
packageName = "@types/lodash.clonedeep";
version = "4.5.0";
src = fetchurl {
url = "https://registry.npmjs.org/@types/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz";
sha512 = "IHijjFVPJTvzvrNPz+6nQy5lZQb7uh807RfTIEaQBrZXrIGjZy0L2dEb3hju34J0eqbXLCY6Hub/g81Jl4pGCA==";
};
};
"@types/node-22.5.1" = {
name = "_at_types_slash_node";
packageName = "@types/node";
version = "22.5.1";
src = fetchurl {
url = "https://registry.npmjs.org/@types/node/-/node-22.5.1.tgz";
sha512 = "KkHsxej0j9IW1KKOOAA/XBA0z08UFSrRQHErzEfA3Vgq57eXIMYboIlHJuYIfd+lwCQjtKqUu3UnmKbtUc9yRw==";
};
};
"@types/seedrandom-3.0.8" = {
name = "_at_types_slash_seedrandom";
packageName = "@types/seedrandom";
version = "3.0.8";
src = fetchurl {
url = "https://registry.npmjs.org/@types/seedrandom/-/seedrandom-3.0.8.tgz";
sha512 = "TY1eezMU2zH2ozQoAFAQFOPpvP15g+ZgSfTZt31AUUH/Rxtnz3H+A/Sv1Snw2/amp//omibc+AEkTaA8KUeOLQ==";
};
};
"ansi-regex-5.0.1" = {
name = "ansi-regex";
packageName = "ansi-regex";
version = "5.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz";
sha512 = "quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==";
};
};
"ansi-styles-4.3.0" = {
name = "ansi-styles";
packageName = "ansi-styles";
version = "4.3.0";
src = fetchurl {
url = "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz";
sha512 = "zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==";
};
};
"antlr4ts-0.5.0-alpha.4" = {
name = "antlr4ts";
packageName = "antlr4ts";
version = "antlr4ts-0.5.0-alpha.4";
src = fetchurl {
url = "https://registry.npmjs.org/antlr4ts/-/antlr4ts-0.5.0-alpha.4.tgz";
sha512 = "sha512-WPQDt1B74OfPv/IMS2ekXAKkTZIHl88uMetg6q3OTqgFxZ/dxDXI0EWLyZid/1Pe6hTftyg5N7gel5wNAGxXyQ==";
};
};
"bignumber.js-9.1.2" = {
name = "bignumber.js";
packageName = "bignumber.js";
version = "9.1.2";
src = fetchurl {
url = "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.2.tgz";
sha512 = "2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==";
};
};
"buffer-from-1.1.2" = {
name = "buffer-from";
packageName = "buffer-from";
version = "1.1.2";
src = fetchurl {
url = "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz";
sha512 = "E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==";
};
};
"chalk-4.1.2" = {
name = "chalk";
packageName = "chalk";
version = "4.1.2";
src = fetchurl {
url = "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz";
sha512 = "oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==";
};
};
"chownr-2.0.0" = {
name = "chownr";
packageName = "chownr";
version = "2.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz";
sha512 = "bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==";
};
};
"cliui-8.0.1" = {
name = "cliui";
packageName = "cliui";
version = "8.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz";
sha512 = "BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==";
};
};
"color-convert-2.0.1" = {
name = "color-convert";
packageName = "color-convert";
version = "2.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz";
sha512 = "RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==";
};
};
"color-name-1.1.4" = {
name = "color-name";
packageName = "color-name";
version = "1.1.4";
src = fetchurl {
url = "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz";
sha512 = "dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==";
};
};
"deprecation-2.3.1" = {
name = "deprecation";
packageName = "deprecation";
version = "2.3.1";
src = fetchurl {
url = "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz";
sha512 = "xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==";
};
};
"emoji-regex-8.0.0" = {
name = "emoji-regex";
packageName = "emoji-regex";
version = "8.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz";
sha512 = "MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==";
};
};
"eol-0.9.1" = {
name = "eol";
packageName = "eol";
version = "0.9.1";
src = fetchurl {
url = "https://registry.npmjs.org/eol/-/eol-0.9.1.tgz";
sha512 = "Ds/TEoZjwggRoz/Q2O7SE3i4Jm66mqTDfmdHdq/7DKVk3bro9Q8h6WdXKdPqFLMoqxrDK5SVRzHVPOS6uuGtrg==";
};
};
"escalade-3.2.0" = {
name = "escalade";
packageName = "escalade";
version = "3.2.0";
src = fetchurl {
url = "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz";
sha512 = "WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==";
};
};
"fs-minipass-2.1.0" = {
name = "fs-minipass";
packageName = "fs-minipass";
version = "2.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz";
sha512 = "V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==";
};
};
"get-caller-file-2.0.5" = {
name = "get-caller-file";
packageName = "get-caller-file";
version = "2.0.5";
src = fetchurl {
url = "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz";
sha512 = "DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==";
};
};
"has-flag-4.0.0" = {
name = "has-flag";
packageName = "has-flag";
version = "4.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz";
sha512 = "EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==";
};
};
"immutable-4.3.7" = {
name = "immutable";
packageName = "immutable";
version = "4.3.7";
src = fetchurl {
url = "https://registry.npmjs.org/immutable/-/immutable-4.3.7.tgz";
sha512 = "1hqclzwYwjRDFLjcFxOM5AYkkG0rpFPpr1RLPMEuGczoS7YA8gLhy8SWXYRAA/XwfEHpfo3cw5JGioS32fnMRw==";
};
};
"is-fullwidth-code-point-3.0.0" = {
name = "is-fullwidth-code-point";
packageName = "is-fullwidth-code-point";
version = "3.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz";
sha512 = "zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==";
};
};
"isarray-1.0.0" = {
name = "isarray";
packageName = "isarray";
version = "1.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz";
sha512 = "VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==";
};
};
"isobject-2.1.0" = {
name = "isobject";
packageName = "isobject";
version = "2.1.0";
src = fetchurl {
url = "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz";
sha512 = "+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA==";
};
};
"json-bigint-1.0.0" = {
name = "json-bigint";
packageName = "json-bigint";
version = "1.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz";
sha512 = "SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==";
};
};
"line-column-1.0.2" = {
name = "line-column";
packageName = "line-column";
version = "1.0.2";
src = fetchurl {
url = "https://registry.npmjs.org/line-column/-/line-column-1.0.2.tgz";
sha512 = "Ktrjk5noGYlHsVnYWh62FLVs4hTb8A3e+vucNZMgPeAOITdshMSgv4cCZQeRDjm7+goqmo6+liZwTXo+U3sVww==";
};
};
"lodash-4.17.21" = {
name = "lodash";
packageName = "lodash";
version = "4.17.21";
src = fetchurl {
url = "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz";
sha512 = "v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==";
};
};
"lodash.camelcase-4.3.0" = {
name = "lodash.camelcase";
packageName = "lodash.camelcase";
version = "4.3.0";
src = fetchurl {
url = "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz";
sha512 = "TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==";
};
};
"lodash.clonedeep-4.5.0" = {
name = "lodash.clonedeep";
packageName = "lodash.clonedeep";
version = "4.5.0";
src = fetchurl {
url = "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz";
sha512 = "H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ==";
};
};
"lodash.isequal-4.5.0" = {
name = "lodash.isequal";
packageName = "lodash.isequal";
version = "4.5.0";
src = fetchurl {
url = "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz";
sha512 = "pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==";
};
};
"long-5.2.3" = {
name = "long";
packageName = "long";
version = "5.2.3";
src = fetchurl {
url = "https://registry.npmjs.org/long/-/long-5.2.3.tgz";
sha512 = "lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==";
};
};
"minipass-3.3.6" = {
name = "minipass";
packageName = "minipass";
version = "3.3.6";
src = fetchurl {
url = "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz";
sha512 = "DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==";
};
};
"minipass-5.0.0" = {
name = "minipass";
packageName = "minipass";
version = "5.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz";
sha512 = "3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==";
};
};
"minizlib-2.1.2" = {
name = "minizlib";
packageName = "minizlib";
version = "2.1.2";
src = fetchurl {
url = "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz";
sha512 = "bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==";
};
};
"mkdirp-1.0.4" = {
name = "mkdirp";
packageName = "mkdirp";
version = "1.0.4";
src = fetchurl {
url = "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz";
sha512 = "vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==";
};
};
"once-1.4.0" = {
name = "once";
packageName = "once";
version = "1.4.0";
src = fetchurl {
url = "https://registry.npmjs.org/once/-/once-1.4.0.tgz";
sha512 = "lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==";
};
};
"protobufjs-7.4.0" = {
name = "protobufjs";
packageName = "protobufjs";
version = "7.4.0";
src = fetchurl {
url = "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz";
sha512 = "mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==";
};
};
"require-directory-2.1.1" = {
name = "require-directory";
packageName = "require-directory";
version = "2.1.1";
src = fetchurl {
url = "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz";
sha512 = "fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==";
};
};
"seedrandom-3.0.5" = {
name = "seedrandom";
packageName = "seedrandom";
version = "3.0.5";
src = fetchurl {
url = "https://registry.npmjs.org/seedrandom/-/seedrandom-3.0.5.tgz";
sha512 = "8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==";
};
};
"source-map-0.6.1" = {
name = "source-map";
packageName = "source-map";
version = "0.6.1";
src = fetchurl {
url = "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz";
sha512 = "UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==";
};
};
"source-map-support-0.5.21" = {
name = "source-map-support";
packageName = "source-map-support";
version = "0.5.21";
src = fetchurl {
url = "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz";
sha512 = "uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==";
};
};
"string-width-4.2.3" = {
name = "string-width";
packageName = "string-width";
version = "4.2.3";
src = fetchurl {
url = "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz";
sha512 = "wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==";
};
};
"strip-ansi-6.0.1" = {
name = "strip-ansi";
packageName = "strip-ansi";
version = "6.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz";
sha512 = "Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==";
};
};
"supports-color-7.2.0" = {
name = "supports-color";
packageName = "supports-color";
version = "7.2.0";
src = fetchurl {
url = "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz";
sha512 = "qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==";
};
};
"tar-6.2.1" = {
name = "tar";
packageName = "tar";
version = "6.2.1";
src = fetchurl {
url = "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz";
sha512 = "DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==";
};
};
"undici-types-6.19.8" = {
name = "undici-types";
packageName = "undici-types";
version = "6.19.8";
src = fetchurl {
url = "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz";
sha512 = "ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==";
};
};
"universal-user-agent-6.0.1" = {
name = "universal-user-agent";
packageName = "universal-user-agent";
version = "6.0.1";
src = fetchurl {
url = "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz";
sha512 = "yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==";
};
};
"vscode-jsonrpc-6.0.0" = {
name = "vscode-jsonrpc";
packageName = "vscode-jsonrpc";
version = "6.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-6.0.0.tgz";
sha512 = "wnJA4BnEjOSyFMvjZdpiOwhSq9uDoK8e/kpRJDTaMYzwlkrhG1fwDIZI94CLsLzlCK5cIbMMtFlJlfR57Lavmg==";
};
};
"vscode-languageserver-7.0.0" = {
name = "vscode-languageserver";
packageName = "vscode-languageserver";
version = "7.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-7.0.0.tgz";
sha512 = "60HTx5ID+fLRcgdHfmz0LDZAXYEV68fzwG0JWwEPBode9NuMYTIxuYXPg4ngO8i8+Ou0lM7y6GzaYWbiDL0drw==";
};
};
"vscode-languageserver-protocol-3.16.0" = {
name = "vscode-languageserver-protocol";
packageName = "vscode-languageserver-protocol";
version = "3.16.0";
src = fetchurl {
url = "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.16.0.tgz";
sha512 = "sdeUoAawceQdgIfTI+sdcwkiK2KU+2cbEYA0agzM2uqaUy2UpnnGHtWTHVEtS0ES4zHU0eMFRGN+oQgDxlD66A==";
};
};
"vscode-languageserver-textdocument-1.0.12" = {
name = "vscode-languageserver-textdocument";
packageName = "vscode-languageserver-textdocument";
version = "1.0.12";
src = fetchurl {
url = "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz";
sha512 = "cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==";
};
};
"vscode-languageserver-types-3.16.0" = {
name = "vscode-languageserver-types";
packageName = "vscode-languageserver-types";
version = "3.16.0";
src = fetchurl {
url = "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.16.0.tgz";
sha512 = "k8luDIWJWyenLc5ToFQQMaSrqCHiLwyKPHKPQZ5zz21vM+vIVUSvsRpcbiECH4WR88K2XZqc4ScRcZ7nk/jbeA==";
};
};
"vscode-uri-3.0.8" = {
name = "vscode-uri";
packageName = "vscode-uri";
version = "3.0.8";
src = fetchurl {
url = "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.8.tgz";
sha512 = "AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==";
};
};
"wrap-ansi-7.0.0" = {
name = "wrap-ansi";
packageName = "wrap-ansi";
version = "7.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz";
sha512 = "YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==";
};
};
"wrappy-1.0.2" = {
name = "wrappy";
packageName = "wrappy";
version = "1.0.2";
src = fetchurl {
url = "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz";
sha512 = "l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==";
};
};
"y18n-5.0.8" = {
name = "y18n";
packageName = "y18n";
version = "5.0.8";
src = fetchurl {
url = "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz";
sha512 = "0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==";
};
};
"yallist-4.0.0" = {
name = "yallist";
packageName = "yallist";
version = "4.0.0";
src = fetchurl {
url = "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz";
sha512 = "3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==";
};
};
"yargs-17.7.2" = {
name = "yargs";
packageName = "yargs";
version = "17.7.2";
src = fetchurl {
url = "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz";
sha512 = "7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==";
};
};
"yargs-parser-21.1.1" = {
name = "yargs-parser";
packageName = "yargs-parser";
version = "21.1.1";
src = fetchurl {
url = "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz";
sha512 = "tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==";
};
};
};
in
{
"@informalsystems/quint" = nodeEnv.buildNodePackage {
name = "_at_informalsystems_slash_quint";
packageName = "@informalsystems/quint";
version = "0.21.1";
src = fetchurl {
url = "https://registry.npmjs.org/@informalsystems/quint/-/quint-0.21.1.tgz";
sha512 = "kqXHC4a+6N1L0UDFzHknryLmOGwEuLk5pQyiirAKyCvkSHGn6QmnZTdn0jJI2bUppA0BS1lANTLrC2UQOGdKQg==";
};
dependencies = [
sources."@grpc/grpc-js-1.11.1"
sources."@grpc/proto-loader-0.7.13"
sources."@js-sdsl/ordered-map-4.4.2"
sources."@octokit/endpoint-9.0.5"
sources."@octokit/openapi-types-22.2.0"
sources."@octokit/request-8.4.0"
sources."@octokit/request-error-5.1.0"
sources."@octokit/types-13.5.0"
sources."@protobufjs/aspromise-1.1.2"
sources."@protobufjs/base64-1.1.2"
sources."@protobufjs/codegen-2.0.4"
sources."@protobufjs/eventemitter-1.1.0"
sources."@protobufjs/fetch-1.1.0"
sources."@protobufjs/float-1.0.2"
sources."@protobufjs/inquire-1.1.0"
sources."@protobufjs/path-1.1.2"
sources."@protobufjs/pool-1.1.0"
sources."@protobufjs/utf8-1.1.0"
sources."@sweet-monads/either-3.2.0"
sources."@sweet-monads/interfaces-3.3.0"
sources."@sweet-monads/maybe-3.2.0"
sources."@types/line-column-1.0.2"
sources."@types/lodash-4.17.7"
sources."@types/lodash.clonedeep-4.5.0"
sources."@types/node-22.5.1"
sources."@types/seedrandom-3.0.8"
sources."ansi-regex-5.0.1"
sources."ansi-styles-4.3.0"
sources."antlr4ts-0.5.0-alpha.4"
sources."bignumber.js-9.1.2"
sources."buffer-from-1.1.2"
sources."chalk-4.1.2"
sources."chownr-2.0.0"
sources."cliui-8.0.1"
sources."color-convert-2.0.1"
sources."color-name-1.1.4"
sources."deprecation-2.3.1"
sources."emoji-regex-8.0.0"
sources."eol-0.9.1"
sources."escalade-3.2.0"
(sources."fs-minipass-2.1.0" // {
dependencies = [
sources."minipass-3.3.6"
];
})
sources."get-caller-file-2.0.5"
sources."has-flag-4.0.0"
sources."immutable-4.3.7"
sources."is-fullwidth-code-point-3.0.0"
sources."isarray-1.0.0"
sources."isobject-2.1.0"
sources."json-bigint-1.0.0"
sources."line-column-1.0.2"
sources."lodash-4.17.21"
sources."lodash.camelcase-4.3.0"
sources."lodash.clonedeep-4.5.0"
sources."lodash.isequal-4.5.0"
sources."long-5.2.3"
sources."minipass-5.0.0"
(sources."minizlib-2.1.2" // {
dependencies = [
sources."minipass-3.3.6"
];
})
sources."mkdirp-1.0.4"
sources."once-1.4.0"
sources."protobufjs-7.4.0"
sources."require-directory-2.1.1"
sources."seedrandom-3.0.5"
sources."source-map-0.6.1"
sources."source-map-support-0.5.21"
sources."string-width-4.2.3"
sources."strip-ansi-6.0.1"
sources."supports-color-7.2.0"
sources."tar-6.2.1"
sources."undici-types-6.19.8"
sources."universal-user-agent-6.0.1"
sources."wrap-ansi-7.0.0"
sources."wrappy-1.0.2"
sources."y18n-5.0.8"
sources."yallist-4.0.0"
sources."yargs-17.7.2"
sources."yargs-parser-21.1.1"
];
buildInputs = globalBuildInputs;
meta = {
description = "Core tool for the Quint specification language";
homepage = "https://github.com/informalsystems/quint";
license = "Apache 2.0";
};
production = true;
bypassCache = true;
reconstructLock = true;
};
"@informalsystems/quint-language-server" = nodeEnv.buildNodePackage {
name = "_at_informalsystems_slash_quint-language-server";
packageName = "@informalsystems/quint-language-server";
version = "0.14.4";
src = fetchurl {
url = "https://registry.npmjs.org/@informalsystems/quint-language-server/-/quint-language-server-0.14.4.tgz";
sha512 = "ZM8sCj5JsyWnI/Z9fspnwofuYpekcu3ke3A1wtrMndH4Vs2AHaJAiCGNy5wINLQho87LuvLL5eOGERbARgct/Q==";
};
dependencies = [
sources."@grpc/grpc-js-1.11.1"
sources."@grpc/proto-loader-0.7.13"
sources."@informalsystems/quint-0.20.0"
sources."@js-sdsl/ordered-map-4.4.2"
sources."@octokit/endpoint-9.0.5"
sources."@octokit/openapi-types-22.2.0"
sources."@octokit/request-8.4.0"
sources."@octokit/request-error-5.1.0"
sources."@octokit/types-13.5.0"
sources."@protobufjs/aspromise-1.1.2"
sources."@protobufjs/base64-1.1.2"
sources."@protobufjs/codegen-2.0.4"
sources."@protobufjs/eventemitter-1.1.0"
sources."@protobufjs/fetch-1.1.0"
sources."@protobufjs/float-1.0.2"
sources."@protobufjs/inquire-1.1.0"
sources."@protobufjs/path-1.1.2"
sources."@protobufjs/pool-1.1.0"
sources."@protobufjs/utf8-1.1.0"
sources."@sweet-monads/either-3.2.0"
sources."@sweet-monads/interfaces-3.3.0"
sources."@sweet-monads/maybe-3.2.0"
sources."@types/line-column-1.0.2"
sources."@types/lodash-4.17.7"
sources."@types/lodash.clonedeep-4.5.0"
sources."@types/node-22.5.1"
sources."@types/seedrandom-3.0.8"
sources."ansi-regex-5.0.1"
sources."ansi-styles-4.3.0"
sources."antlr4ts-0.5.0-alpha.4"
sources."bignumber.js-9.1.2"
sources."buffer-from-1.1.2"
sources."chalk-4.1.2"
sources."chownr-2.0.0"
sources."cliui-8.0.1"
sources."color-convert-2.0.1"
sources."color-name-1.1.4"
sources."deprecation-2.3.1"
sources."emoji-regex-8.0.0"
sources."eol-0.9.1"
sources."escalade-3.2.0"
(sources."fs-minipass-2.1.0" // {
dependencies = [
sources."minipass-3.3.6"
];
})
sources."get-caller-file-2.0.5"
sources."has-flag-4.0.0"
sources."immutable-4.3.7"
sources."is-fullwidth-code-point-3.0.0"
sources."isarray-1.0.0"
sources."isobject-2.1.0"
sources."json-bigint-1.0.0"
sources."line-column-1.0.2"
sources."lodash-4.17.21"
sources."lodash.camelcase-4.3.0"
sources."lodash.clonedeep-4.5.0"
sources."lodash.isequal-4.5.0"
sources."long-5.2.3"
sources."minipass-5.0.0"
(sources."minizlib-2.1.2" // {
dependencies = [
sources."minipass-3.3.6"
];
})
sources."mkdirp-1.0.4"
sources."once-1.4.0"
sources."protobufjs-7.4.0"
sources."require-directory-2.1.1"
sources."seedrandom-3.0.5"
sources."source-map-0.6.1"
sources."source-map-support-0.5.21"
sources."string-width-4.2.3"
sources."strip-ansi-6.0.1"
sources."supports-color-7.2.0"
sources."tar-6.2.1"
sources."undici-types-6.19.8"
sources."universal-user-agent-6.0.1"
sources."vscode-jsonrpc-6.0.0"
sources."vscode-languageserver-7.0.0"
sources."vscode-languageserver-protocol-3.16.0"
sources."vscode-languageserver-textdocument-1.0.12"
sources."vscode-languageserver-types-3.16.0"
sources."vscode-uri-3.0.8"
sources."wrap-ansi-7.0.0"
sources."wrappy-1.0.2"
sources."y18n-5.0.8"
sources."yallist-4.0.0"
sources."yargs-17.7.2"
sources."yargs-parser-21.1.1"
];
buildInputs = globalBuildInputs;
meta = {
description = "Language Server for the Quint specification language";
homepage = "https://github.com/informalsystems/quint#readme";
license = "Apache 2.0";
};
production = true;
bypassCache = true;
reconstructLock = true;
};
}
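For context only (not part of this commit): a node2nix-generated package set like the one above is normally consumed through the generated default.nix that sits beside it. The sketch below is a guess at typical usage; the ./default.nix path and the nodejs_18 attribute are assumptions, not something this diff defines.

  # Hypothetical consumer of the generated Quint package set above.
  # Assumes node2nix also emitted ./default.nix and ./node-env.nix alongside it.
  let
    pkgs = import <nixpkgs> { };
    quintPackages = import ./default.nix {
      inherit pkgs;
      inherit (pkgs) system;
      nodejs = pkgs.nodejs_18;  # any nodejs attribute the generated file accepts
    };
  in
  [
    quintPackages."@informalsystems/quint"
    quintPackages."@informalsystems/quint-language-server"
  ]

Running nix-build on an expression like this should build both CLI tools and leave the usual result symlinks.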

View File

@@ -1,39 +0,0 @@
{ lib, fetchurl, stdenvNoCC, testers }:
stdenvNoCC.mkDerivation (finalAttrs: {
pname = "vfkit";
version = "0.5.1";
src = fetchurl {
url = "https://github.com/crc-org/vfkit/releases/download/v${finalAttrs.version}/vfkit";
hash = "sha256-at+KsvsKO359d4VUvcSuio2ej5hM6//U4Mj/jqXwhEc=";
};
dontUnpack = true;
installPhase = ''
runHook preInstall
install -Dm755 $src $out/bin/vfkit
runHook postInstall
'';
passthru.tests = {
version = testers.testVersion {
package = finalAttrs.finalPackage;
};
};
meta = {
description = "Simple command line tool to start VMs through virtualization framework";
homepage = "https://github.com/crc-org/vfkit";
license = lib.licenses.asl20;
maintainers = [ ];
platforms = lib.platforms.darwin;
# Source build will be possible after darwin SDK 12.0 bump
# https://github.com/NixOS/nixpkgs/pull/229210
sourceProvenance = [ lib.sourceTypes.binaryNativeCode ];
mainProgram = "vfkit";
};
})
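A side note on the removed derivation above (purely illustrative, not part of the change): a binary-only package in this shape is typically instantiated with callPackage, and its passthru version check can be built on its own. The file name and attribute names below are hypothetical.

  # Hypothetical stand-alone build of the removed expression, assuming it is
  # saved as ./vfkit.nix; build with `nix-build example.nix -A vfkit` on darwin.
  let
    pkgs = import <nixpkgs> { };
    vfkit = pkgs.callPackage ./vfkit.nix { };
  in
  {
    inherit vfkit;
    versionCheck = vfkit.tests.version;  # the testers.testVersion derivation
  }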

View File

@@ -2,13 +2,13 @@
with pkgs;
buildGoModule rec {
pname = "VictoriaMetrics";
version = "0.28.0-victorialogs";
version = "0.17.0-victorialogs";
src = fetchFromGitHub {
owner = pname;
repo = pname;
rev = "v${version}";
hash = "sha256-QCBUvUABx/+85HEfhDbuCHiVgP3lNBUdJ2BnspvRFHE=";
hash = "sha256-Ps55tCEw2UQch7yKJ1zYEtEE6fEE0ahhIiagt/hFxpo=";
};
vendorHash = null;
@@ -26,7 +26,7 @@ buildGoModule rec {
# allow any go 1.22 version
substituteInPlace go.mod \
--replace-fail "go 1.22.5" "go 1.22"
--replace "go 1.22.4" "go 1.22"
# Increase timeouts in tests to prevent failure on heavily loaded builders
substituteInPlace lib/storage/storage_test.go \