[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[elpa] externals/ellama de226f9d39 41/53: use llm library to hanle chat history
From: |
ELPA Syncer |
Subject: |
[elpa] externals/ellama de226f9d39 41/53: use llm library to hanle chat history |
Date: |
Sun, 17 Dec 2023 18:57:59 -0500 (EST) |
branch: externals/ellama
commit de226f9d3976fb5a5e9b8a13214d1069962846c0
Author: Sergey Kostyaev <s.kostyaev@omp.ru>
Commit: Sergey Kostyaev <kostyaev.sergey2@wb.ru>
use llm library to hanle chat history
---
ellama.el | 211 +++++++++++++++++---------------------------------------------
1 file changed, 57 insertions(+), 154 deletions(-)
diff --git a/ellama.el b/ellama.el
index 9ae6996386..3762b5fa55 100644
--- a/ellama.el
+++ b/ellama.el
@@ -1,11 +1,11 @@
-;;; ellama.el --- Ollama client for calling local LLMs -*- lexical-binding: t -*-
+;;; ellama.el --- Tool for interacting with LLMs -*- lexical-binding: t -*-
;; Copyright (C) 2023 Sergey Kostyaev
;; Author: Sergey Kostyaev <sskostyaev@gmail.com>
;; URL: http://github.com/s-kostyaev/ellama
;; Keywords: help local tools
-;; Package-Requires: ((emacs "28.1")(llm "0.4.0")(spinner "1.7.4"))
+;; Package-Requires: ((emacs "28.1")(llm "0.5.0")(spinner "1.7.4"))
;; Version: 0.1.0
;; Created: 8th Oct 2023
@@ -24,8 +24,12 @@
;;; Commentary:
;;
-;; Ellama is ollama client for Emacs. Adds ability to call local LLMs from
-;; inside Emacs.
+;; Ellama is a tool for interacting with large language models from Emacs.
+;; It allows you to ask questions and receive responses from the
+;; LLMs. Ellama can perform various tasks such as translation, code
+;; review, summarization, enhancing grammar/spelling or wording and
+;; more through the Emacs interface. Ellama natively supports streaming
+;; output, making it effortless to use with your preferred text editor.
;;
;;; Code:
@@ -36,29 +40,13 @@
(require 'spinner)
(defgroup ellama nil
- "Ollama client for Emacs."
+ "Tool for interacting with LLMs for Emacs."
:group 'ellama)
-(defcustom ellama-url "http://localhost:11434/api/generate" "Url to call ollama."
- :group 'ellama
- :type 'string)
-
-(defcustom ellama-curl-executable (executable-find "curl") "Path to curl executable."
- :group 'ellama
- :type 'string)
-
-(defcustom ellama-model "zephyr" "Model to use ollama with."
- :group 'ellama
- :type 'string)
-
(defcustom ellama-buffer "*ellama*" "Default ellama buffer."
:group 'ellama
:type 'string)
-(defcustom ellama-always-show-buffer nil "Always show ellama buffer."
- :group 'ellama
- :type 'boolean)
-
(defcustom ellama-user-nick "User" "User nick in logs."
:group 'ellama
:type 'string)
@@ -75,10 +63,6 @@
:group 'ellama
:type 'string)
-(defcustom ellama-template nil "Template to use with ollama instead of default."
- :group 'ellama
- :type 'string)
-
(defcustom ellama-provider
(make-llm-ollama
:chat-model "zephyr" :embedding-model "zephyr")
@@ -90,11 +74,7 @@
:group 'ellama
:type 'symbol)
-(defvar-local ellama-context nil "Context that contains ellama conversation memory.")
-
-(defvar-local ellama--unprocessed-data nil)
-
-(defvar-local ellama--request nil)
+(defvar-local ellama--chat-prompt nil)
(defvar ellama--code-prefix
(rx (minimal-match
@@ -172,129 +152,54 @@ In BUFFER at POINT will be inserted result between PREFIX and SUFFIX."
(spinner-stop)))
(lambda (_ msg) (error "Error calling the LLM: %s" msg)))))))
-(defun ellama--filter (proc string)
- "Filter function for ellama curl process.
-Filter PROC output STRING."
- (when (buffer-live-p (process-buffer proc))
- (with-current-buffer (process-buffer proc)
- (let ((moving (= (point) (process-mark proc))))
- ;; Insert the text, advancing the process marker.
- ;; For buffers other than ellama-buffer, stay on current point.
- (if (string= (buffer-name (process-buffer proc))
- ellama-buffer)
- (goto-char (process-mark proc))
- (set-marker (process-mark proc) (point)))
- (when ellama--unprocessed-data
- (setq string (concat ellama--unprocessed-data string)))
- (condition-case nil
- (progn
- (mapc (lambda (s)
- (when-let ((data
- (json-parse-string s :object-type 'plist)))
- (when-let ((context (plist-get data :context)))
- (setq ellama-context context))
- (when-let ((response (plist-get data :response)))
- (goto-char (process-mark proc))
- (insert response)
- (set-marker (process-mark proc) (point)))))
- (split-string string "\n" t))
- (setq ellama--unprocessed-data nil)
- (set-marker (process-mark proc) (point))
- (if moving (goto-char (process-mark proc))))
- (error (setq ellama--unprocessed-data
- (car (last (split-string string "\n" t))))))))))
-
-(defun ellama-query (prompt &rest args)
- "Query ellama for PROMPT.
-
-ARGS contains keys for fine control.
-
-:buffer BUFFER -- BUFFER is the buffer (or `buffer-name') to insert ollama reply
-in. Default value is `ellama-buffer'.
-
-:display BOOL -- If BOOL, show BUFFER to user.
-Default value is `ellama-always-show-buffer'.
-
-:log BOOL -- If BOOL, show conversation between user and ellama, prefixed with
-nicks.
-
-:model MODEL -- MODEL that ollama should use to generate answer. Default value
-is `ellama-model'.
-
-:memory BOOL -- If BOOL, enable conversation memory.
-
-:system SYSTEM -- SYSTEM message for prompt MODEL. If not set, default value
-inside ollama will be used. May not work for some models, see
-https://github.com/jmorganca/ollama/issues/693 - :template can help you in that
-case.
-
-:temperature TEMPERATURE -- set MODEL temperature to TEMPERATURE. If not set,
- default value inside ollama will be used.
-
-:template TEMPLATE -- TEMPLATE to use with ollama MODEL instead of ollama's
-default. Default value is `ellama-template'."
- (let ((buffer (or (plist-get args :buffer) ellama-buffer))
- (display (or (plist-get args :display) ellama-always-show-buffer))
- (log (plist-get args :log))
- (model (or (plist-get args :model) ellama-model))
- (memory (plist-get args :memory))
- (system (plist-get args :system))
- (temperature (plist-get args :temperature))
- (template (or (plist-get args :template) ellama-template)))
- (when (not (get-buffer buffer))
- (create-file-buffer buffer)
- (with-current-buffer buffer
- (if ellama-buffer-mode
- (funcall ellama-buffer-mode))))
- (when display
- (display-buffer buffer))
- (when log
- (with-current-buffer buffer
- (save-excursion
- (goto-char (point-max))
- (insert "## " ellama-user-nick ":\n" prompt "\n\n"
- "## " ellama-assistant-nick ":\n"))))
- (let ((sentinel (if log
- (lambda (proc event)
- (when (string= event "finished\n")
- (with-current-buffer (process-buffer proc)
- (save-excursion
- (goto-char (point-max))
- (insert "\n\n"))
- (spinner-stop))))
- (lambda (proc event)
- (when (string= event "finished\n")
- (with-current-buffer (process-buffer proc)
- (spinner-stop)))))))
- (with-current-buffer buffer
- (setq ellama--request (list :model model :prompt prompt))
- (when (and memory ellama-context)
- (setq ellama--request (plist-put ellama--request :context ellama-context)))
- (when system
- (setq ellama--request (plist-put ellama--request :system system)))
- (when temperature
- (setq ellama--request (plist-put ellama--request :options
- (list :temperature temperature))))
- (when template
- (setq ellama--request (plist-put ellama--request :template template)))
- ;; (message "request: %s" (json-encode-plist ellama--request))
- (make-process
- :buffer buffer
- :name "ellama"
- :command (list
- ellama-curl-executable
- "-s" "-X" "POST" ellama-url "-d"
- (json-encode-plist ellama--request))
- :filter 'ellama--filter
- :sentinel sentinel)
- (spinner-start ellama-spinner-type)))))
+;;;###autoload
+(defun ellama-chat (prompt)
+ "Send PROMPT to ellama chat with conversation history."
+ (interactive "sAsk ellama: ")
+ (while (not (buffer-live-p (get-buffer ellama-buffer)))
+ (get-buffer-create ellama-buffer)
+ (with-current-buffer ellama-buffer
+ (funcall ellama-buffer-mode)))
+ (with-current-buffer ellama-buffer
+ (display-buffer ellama-buffer)
+ (if ellama--chat-prompt
+ (llm-chat-prompt-append-response
+ ellama--chat-prompt prompt)
+ (setq ellama--chat-prompt (llm-make-simple-chat-prompt prompt)))
+ (save-excursion
+ (goto-char (point-max))
+ (insert "## " ellama-user-nick ":\n" prompt "\n\n"
+ "## " ellama-assistant-nick ":\n")
+ (let* ((start (make-marker))
+ (end (make-marker))
+ (point (point-max))
+ (insert-text
+ (lambda (text)
+ ;; Erase and insert the new text between the marker cons.
+ (with-current-buffer (marker-buffer start)
+ (save-excursion
+ (goto-char start)
+ (delete-region start end)
+ (insert text))))))
+ (set-marker start point)
+ (set-marker end point)
+ (set-marker-insertion-type start nil)
+ (set-marker-insertion-type end t)
+ (spinner-start ellama-spinner-type)
+ (llm-chat-streaming ellama-provider
+ ellama--chat-prompt
+ insert-text
+ (lambda (text)
+ (funcall insert-text text)
+ (with-current-buffer ellama-buffer
+ (save-excursion
+ (goto-char (point-max))
+ (insert "\n\n"))
+ (spinner-stop)))
+ (lambda (_ msg) (error "Error calling the LLM: %s" msg)))))))
;;;###autoload
-(defun ellama-ask ()
- "Ask ellama about something."
- (interactive)
- (let ((prompt (read-string "Ask ellama: ")))
- (ellama-query prompt :display t :log t :memory t)))
+(defalias 'ellama-ask 'ellama-chat)
;;;###autoload
(defun ellama-ask-about ()
@@ -304,9 +209,7 @@ default. Default value is `ellama-template'."
(text (if (region-active-p)
(buffer-substring-no-properties (region-beginning)
(region-end))
(buffer-substring-no-properties (point-min) (point-max)))))
- (ellama-query
- (format "Text:\n%s\nRegarding this text, %s" text input)
- :display t :log t :memory t)))
+ (ellama-chat (format "Text:\n%s\nRegarding this text, %s" text input))))
(defun ellama-instant (prompt)
"Prompt ellama for PROMPT to reply instantly."
- [elpa] externals/ellama 162433ada3 15/53: mention streaming output in readme, (continued)
- [elpa] externals/ellama 162433ada3 15/53: mention streaming output in readme, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama caf4578e8d 21/53: add llm keyword to documentation, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama f35622b5f7 24/53: change default model to zephyr, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 92d4349bac 25/53: add information about local LLMs into package summary, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 75ed340a4d 26/53: add ollama into installation instructions, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 8caaf7a3b6 28/53: Merge pull request #2 from s-kostyaev/s-kostyaev-enable-gitlab-ci, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 235f41be0c 29/53: :recycle: Refactor ellama.el code for improved response handling., ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 316e982ef6 32/53: fix spinner for ellama-instant, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 06ad60d117 35/53: fix upate selected region with ellama (fixes #6), ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 956dfe62aa 37/53: first step to use llm library for LLM calls, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama de226f9d39 41/53: use llm library to hanle chat history, ELPA Syncer <=
- [elpa] externals/ellama b8f3dada5a 40/53: use spinner with llm library, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 42db1e0ba0 42/53: update readme, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 2700be1cf9 44/53: upate llm requirement to fix #8, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 702042c0bf 45/53: Update llm library to fix #9, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 11105e4169 50/53: Update llm dependency, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 2666f37537 01/53: Initial commit, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 14b5b2a351 13/53: add ellama-ask gif, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 6e0b054030 20/53: use defvar-local instead of defvar + make-local-variable, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama c3d90408dd 22/53: rephrase ellama summary in documentation, ELPA Syncer, 2023/12/17
- [elpa] externals/ellama 6b0108b8d6 36/53: fix conversation memory (fixes #5), ELPA Syncer, 2023/12/17