author     azahi <azat@bahawi.net>  2024-11-30 00:48:38 +0300
committer  azahi <azat@bahawi.net>  2024-11-30 00:48:38 +0300
commit     a8129a3084163038029f520d631de2b740b1ea75 (patch)
tree       cd7ce1d60f8b64ef8648076ea27799049861301d  /modules/emacs
parent     2024-11-17 (diff)

    2024-11-30

Diffstat:
 modules/emacs/doom/config.el   | 23
 modules/emacs/doom/packages.el | 15

 2 files changed, 26 insertions(+), 12 deletions(-)
diff --git a/modules/emacs/doom/config.el b/modules/emacs/doom/config.el
index 464a8b2..a212e17 100644
--- a/modules/emacs/doom/config.el
+++ b/modules/emacs/doom/config.el
@@ -271,11 +271,13 @@
          gptel-backend (gptel-make-ollama "ollama"
                          :host "eonwe.shire.net:11434"
                          :stream t
-                         :models '(mistral:7b
+                         :models '(dagbs/qwen2.5-coder-7b-instruct-abliterated:latest
+                                   qwen2.5-coder:7b
+                                   deepseek-coder-v2:16b
                                    codegemma:7b
                                    codellama:7b
-                                   deepseek-coder-v2:16b
-                                   llama3.2:3b))))
+                                   llama3.2:3b
+                                   mistral:7b))))
 
 (use-package! ellama
   :init
@@ -306,6 +308,18 @@
                                        :port 11434
                                        :chat-model "qwen2.5:7b"
                                        :embedding-model "nomic-embed-text:latest"))
+                            ("qwen-coder" . (make-llm-ollama
+                                             :scheme "http"
+                                             :host "eonwe.shire.net"
+                                             :port 11434
+                                             :chat-model "qwen2.5-coder:7b"
+                                             :embedding-model "nomic-embed-text:latest"))
+                            ("qwen-coder-instruct" . (make-llm-ollama
+                                                      :scheme "http"
+                                                      :host "eonwe.shire.net"
+                                                      :port 11434
+                                                      :chat-model "dagbs/qwen2.5-coder-7b-instruct-abliterated:latest"
+                                                      :embedding-model "nomic-embed-text:latest"))
                             ("gemma" . (make-llm-ollama
                                         :scheme "http"
                                         :host "eonwe.shire.net"
@@ -357,8 +371,7 @@
                                        :scheme "http"
                                        :host "eonwe.shire.net"
                                        :port 11434
-                                       :chat-model "llama3.2:3b"
+                                       :chat-model "dagbs/qwen2.5-coder-7b-instruct-abliterated:latest"
                                        :embedding-model "nomic-embed-text:latest"))
 
-
   (magit-gptcommit-status-buffer-setup))
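
The config.el hunks above reorder and extend the Ollama model list for gptel, register two additional qwen2.5-coder providers for ellama, and switch magit-gptcommit to the instruct model, all served from eonwe.shire.net:11434. A minimal sketch (not part of the commit) of pointing both packages at one of the newly added models; it assumes the stock `gptel-model' and `ellama-provider' variables and the `llm-ollama' feature that provides `make-llm-ollama':

  ;; Sketch only: select one of the models configured above as the default.
  ;; `gptel-model' / `ellama-provider' are assumed to be the standard
  ;; default-model and default-provider variables of gptel and ellama.
  (require 'llm-ollama)
  (setq gptel-model 'qwen2.5-coder:7b)
  (setq ellama-provider
        (make-llm-ollama
         :scheme "http"
         :host "eonwe.shire.net"
         :port 11434
         :chat-model "qwen2.5-coder:7b"
         :embedding-model "nomic-embed-text:latest"))
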
diff --git a/modules/emacs/doom/packages.el b/modules/emacs/doom/packages.el
index 8fb24f4..a7085da 100644
--- a/modules/emacs/doom/packages.el
+++ b/modules/emacs/doom/packages.el
@@ -8,18 +8,19 @@
 
 (package! org-roam-ui)
 
+;; https://github.com/doomemacs/doomemacs/issues/8166
+;; https://github.com/org-roam/org-roam/issues/2485
+(unpin! emacsql)
+(package! emacsql
+  :recipe (:host github :repo "magit/emacsql")
+  :pin "491105a01f58bf0b346cbc0254766c6800b229a2")
+
 (package! nickel-mode)
 
 (package! hledger-mode)
 
 (package! sops
-  :recipe (:host github
-           :repo "djgoku/sops"))
-
-(unpin! ansible)
-(package! ansible
-  :recipe (:host gitlab
-           :repo "emacs-ansible/emacs-ansible"))
+  :recipe (:host github :repo "djgoku/sops"))
 
 (package! gptel)
 (package! ellama)
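
The packages.el hunk pins emacsql to a specific commit from magit/emacsql to work around the two linked Doom/org-roam issues, condenses the sops recipe onto one line, and drops the custom ansible recipe, reverting that package to Doom's default. The emacsql change follows the usual Doom pattern for overriding a module-pinned package; a sketch with a hypothetical package name and commit:

  ;; Sketch only: `some-package' and the commit hash are illustrative.
  (unpin! some-package)                 ; drop the pin the Doom module ships
  (package! some-package                ; re-declare with an explicit recipe
    :recipe (:host github :repo "someone/some-package")
    :pin "0123456789abcdef0123456789abcdef01234567")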
