Installing Neovim plugins

The init.lua below bootstraps the lazy.nvim plugin manager (cloning it on first launch if it is missing) and then installs and configures nvim-treesitter for highlighting, indentation, and folding.

-- ~/.config/nvim/init.lua

local lazypath = vim.fn.stdpath("data") .. "/lazy/lazy.nvim"
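-- clone lazy.nvim from GitHub on first startup if it is not installed yet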
if not vim.loop.fs_stat(lazypath) then
  print("Installing lazy.nvim... Please wait.")
  vim.fn.system({
    "git",
    "clone",
    "--filter=blob:none",
    "https://github.com/folke/lazy.nvim.git",
    "--branch=stable",
    lazypath,
  })
end
vim.opt.rtp:prepend(lazypath)

require("lazy").setup({
  {
    "nvim-treesitter/nvim-treesitter",
    build = ":TSUpdate",  -- string form, safe and reliable; runs :TSUpdate after the plugin is installed or updated
    lazy = false,
    -- use `config` (not `init`) so this runs after the plugin has loaded,
    -- which makes requiring "nvim-treesitter.configs" safe
    config = function()
      local ok, configs = pcall(require, "nvim-treesitter.configs")
      if not ok then return end

      configs.setup({
        ensure_installed = { "lua", "python", "javascript", "typescript", "c", "cpp" },
        highlight = { enable = true },
        indent = { enable = true },
        auto_install = true,  -- install missing parsers when a matching buffer is opened
      })

      -- nvim-treesitter has no "folding" module; folding is enabled via the fold
      -- expression below (foldlevel=99 keeps folds open when a buffer is first loaded)
      vim.opt.foldmethod = "expr"
      vim.opt.foldexpr = "nvim_treesitter#foldexpr()"
      vim.opt.foldlevel = 99
    end,
  },
})
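
Additional plugins go into the same setup table as further entries. As a minimal, purely illustrative sketch (the plugin chosen here, lualine, and its options are not part of the original config), a lazily loaded status line could be declared like this:

-- extra entry inside the same require("lazy").setup({ ... }) list (illustrative only)
{
  "nvim-lualine/lualine.nvim",   -- status line plugin, used here only as an example
  event = "VeryLazy",            -- defer loading until after startup
  config = function()
    require("lualine").setup({}) -- default options
  end,
},

After saving init.lua and restarting Neovim, lazy.nvim installs anything that is missing; :Lazy shows the install status, and :checkhealth nvim-treesitter plus :TSInstallInfo help verify the treesitter setup.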

A few Ollama issues

  • If ollama list (or other ollama commands) fails with Error: could not connect to ollama app, is it running?, start the server first with ollama serve, then open a second terminal for the other ollama commands;
  • If the OLLAMA_MODELS environment variable appears to have no effect after you set it, rebooting the machine (so the Ollama service/app picks up the new environment) may fix it;
  • The default models directory differs across operating systems:
    macOS: ~/.ollama/models
    Linux: /usr/share/ollama/.ollama/models
    Windows: C:\Users\%username%\.ollama\models
  • Models can be migrated between machines and operating systems. For example, with llama3:70b already downloaded: run ollama show --modelfile llama3:70b > llama370b.modfile, copy the downloaded llama3:70b model files (from the models directory above) to the target location, change the FROM line in llama370b.modfile to point at that location, then on the target machine run ollama create llama370 -f llama370b.modfile (a quick verification step is noted below)
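
Assuming the FROM path in the modelfile is correct, ollama create registers the model under the new name, so ollama list on the target machine should then show llama370; running it (for example with ollama run llama370) confirms that the migrated weights load correctly.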