forked from zweifisch/ollama
-
Notifications
You must be signed in to change notification settings - Fork 0
/
ollama.el
543 lines (483 loc) ยท 23.9 KB
/
ollama.el
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
;;; ollama.el --- ollama client for Emacs
;; Development leftover: globally enabling `url-debug' at load time spams
;; the " *URL-DEBUG*" buffer for every package that uses url.el.
;; Enable it manually while troubleshooting instead:
;; (setq url-debug t)
;; Copyright (C) 2023 ZHOU Feng
;; Copyright (C) 2023 James Michael DuPont
;; Author: ZHOU Feng <zf.pascal@gmail.com>
;; Author: Mike dupont <jmikedupont2@gmail.com>
;; URL: http://github.com/zweifisch/ollama
;; Keywords: ollama llama2
;; Version: 0.0.1
;; Created: 6th Aug 2023
;; This file is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;; This file is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;; You should have received a copy of the GNU General Public License
;; along with this program. If not, see <http://www.gnu.org/licenses/>.
;;; Commentary:
;;
;; ollama client for Emacs
;;
;;; Code:
(require 'json)
(require 'cl-lib)
(require 'url)
(defgroup ollama nil
  "Ollama client for Emacs."
  ;; A customization group should name a parent group; pointing a group
  ;; at itself (`:group 'ollama') creates a self-referential cycle.
  :group 'external
  :prefix "ollama:")
(defcustom ollama:endpoint "http://localhost:11434/api/generate"
"URL of the Ollama /api/generate HTTP endpoint used for all requests."
:group 'ollama
:type 'string)
;; 1285 ./ollama run deepseek
;; 1286 ./ollama run deepseek-math
;; 1287 ./ollama run deepseek-coder
;; 1288 ./ollama run wizard-math
;; mixtral
(defcustom ollama:model "wizard-math"
"Name of the Ollama model sent with every request."
:group 'ollama
:type 'string)
(defcustom ollama:language "Chinese"
"Target language used by `ollama-translate-word'."
:group 'ollama
:type 'string)
(defun ollama-fetch (url prompt model)
  "POST PROMPT for MODEL to URL and return the raw response body as a string.
The body is the concatenation of the newline-separated JSON objects the
streaming endpoint returns, decoded as UTF-8.  Signals an error when no
response buffer is obtained.  The response buffer is always killed, even
on a non-local exit, so repeated calls do not leak buffers."
  (let* ((url-request-method "POST")
         (url-request-extra-headers
          '(("Content-Type" . "application/json")))
         (url-request-data
          (encode-coding-string
           (json-encode `((model . ,model) (prompt . ,prompt)))
           'utf-8))
         (buf (url-retrieve-synchronously url)))
    (unless buf
      (error "ollama: no response from %s" url))
    (unwind-protect
        (with-current-buffer buf
          (goto-char url-http-end-of-headers)
          (decode-coding-string
           (buffer-substring-no-properties (point) (point-max))
           'utf-8))
      ;; Fix: the original leaked one live buffer per request.
      (kill-buffer buf))))
(defun ollama-get-response-from-line (line)
  "Return the `response' field of the JSON object encoded in LINE."
  (let ((parsed (json-read-from-string line)))
    ;; json-read-from-string yields an alist keyed by symbols.
    (cdr (assq 'response parsed))))
(defun ollama-prompt (url prompt model)
  "Send PROMPT for MODEL to URL and return the concatenated response text.
Each non-empty line of the streamed reply is parsed as JSON and its
`response' fields are joined into one string."
  ;; OMIT-NULLS drops the empty strings the original filtered with cl-remove-if.
  (let ((lines (split-string (ollama-fetch url prompt model) "\n" t)))
    (mapconcat #'ollama-get-response-from-line lines "")))
;;;###autoload
(defun ollama-prompt-line ()
  "Prompt with current word."
  (interactive)
  (let ((line-text (thing-at-point 'line)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (ollama-prompt ollama:endpoint line-text ollama:model)))))
;;;###autoload
(defun ollama-define-word ()
  "Find definition of current word."
  (interactive)
  (let ((query (format "define %s" (thing-at-point 'word))))
    (with-output-to-temp-buffer "*ollama*"
      (princ (ollama-prompt ollama:endpoint query ollama:model)))))
;;;###autoload
(defun ollama-translate-word ()
  "Translate current word."
  (interactive)
  (let ((query (format "translate \"%s\" to %s"
                       (thing-at-point 'word)
                       ollama:language)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (ollama-prompt ollama:endpoint query ollama:model)))))
;;;###autoload
(defun ollama-summarize-region ()
  "Summarize marked text."
  (interactive)
  (let* ((region-text (buffer-substring (region-beginning) (region-end)))
         (query (format "summarize \"\"\"%s\"\"\"" region-text)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (ollama-prompt ollama:endpoint query ollama:model)))))
(defun ollama-raw-region-fill ()
  "Exec marked text."
  (interactive)
  (let ((region-text (buffer-substring (region-beginning) (region-end))))
    (with-output-to-temp-buffer "*ollama*"
      ;; Query the model four times with the same region text.
      (dotimes (_ 4)
        (princ (format "#+begin_src output\n%s\n#+end_src\n"
                       (ollama-prompt ollama:endpoint
                                      (format "%s" region-text)
                                      ollama:model))))
      ;; After all four passes, hard-wrap the accumulated output.
      (with-current-buffer "*ollama*"
        (fill-region (point-min) (point-max))))))
(defun ollama-raw-region ()
  "Exec marked text."
  (interactive)
  (let ((region-text (buffer-substring (region-beginning) (region-end))))
    (with-output-to-temp-buffer "*ollama*"
      ;; Query the model eight times with the same region text.
      (dotimes (_ 8)
        (princ (format "#+begin_src output\n%s\n#+end_src\n"
                       (ollama-prompt ollama:endpoint
                                      (format "%s" region-text)
                                      ollama:model)))))))
;; rewrite this function to apply ollama to chunks of a buffer in a sliding window
;;;###autoload
(defun ollama-exec-region ()
  "Exec marked text."
  (interactive)
  (let* ((region-text (buffer-substring (region-beginning) (region-end)))
         (query (format "execute \"\"\"%s\"\"\"" region-text)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (ollama-prompt ollama:endpoint query ollama:model)))))
;; rewrite this function to apply ollama to chunks of a buffer in a sliding window
;;;###autoload
(defun ollama-reinterpret-region-insert ()
  "Exec marked text."
  (interactive)
  (let ((region-text (buffer-substring (region-beginning) (region-end))))
    (with-output-to-temp-buffer "*ollama*"
      ;; Echo the prompt as an input block, then print the model's
      ;; answer tagged with the model name.
      (princ (format "#+begin_src input\nrewrite and reinterpret creatively preserving main ideas \"\"\"%s\"\"\"\n#+end_src\n"
                     region-text))
      (princ (format "#+begin_src output %s\n%s\n#+end_src\n"
                     ollama:model
                     (ollama-prompt ollama:endpoint
                                    (format "rewrite and reinterpret creatively preserving main ideas \"\"\"%s\"\"\"" region-text)
                                    ollama:model))))))
(defun ollama-reinterpret-region-insert-2x ()
  "Exec marked text."
  (interactive)
  (let ((region-text (buffer-substring (region-beginning) (region-end))))
    (with-output-to-temp-buffer "*ollama*"
      ;; Echo the prompt as an input block, then print the model answer.
      (princ (format "#+begin_src input\nrewrite and reinterpret creatively preserving main ideas \"\"\"%s\"\"\"\n#+end_src\n"
                     region-text))
      (princ (format "#+begin_src output\n%s\n#+end_src\n"
                     (ollama-prompt ollama:endpoint
                                    (format "rewrite and reinterpret creatively preserving main ideas \"\"\"%s\"\"\"" region-text)
                                    ollama:model))))))
(defun ollama-exec-region-org ()
  "Exec marked text."
  (interactive)
  (let ((region-text (buffer-substring (region-beginning) (region-end))))
    (with-output-to-temp-buffer "*ollama*"
      ;; Echo the prompt as an org input block, then the model answer.
      (princ (format "#+begin_src input\nexecute \"\"\"%s\"\"\"\n#+end_src\n" region-text))
      (princ (format "#+begin_src output\n%s\n#+end_src\n"
                     (ollama-prompt ollama:endpoint
                                    (format "execute \"\"\"%s\"\"\"" region-text)
                                    ollama:model))))))
;;;###autoload
(defun ollama-menhir-region ()
  "Exec marked text."
  (interactive)
  (let* ((region-text (buffer-substring (region-beginning) (region-end)))
         (query (format "translate this to ocaml menhir parser: \"\"\"%s\"\"\"" region-text)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (ollama-prompt ollama:endpoint query ollama:model)))))
;;;###autoload
(defun ollama-menhir-diagnose-region ()
  "Ask the model to diagnose an OCaml Menhir parser error in the region.
Renamed from a duplicate `ollama-menhir-region' definition that silently
clobbered the translate command of the same name defined just above."
  (interactive)
  (with-output-to-temp-buffer "*ollama*"
    (princ
     (ollama-prompt ollama:endpoint
                    (format "diagnose ocaml menhir parser error: \"\"\"%s\"\"\""
                            (buffer-substring (region-beginning) (region-end)))
                    ollama:model))))
;;;###autoload
(defun ollama-cuda-region ()
  "Exec marked text."
  (interactive)
  (let* ((region-text (buffer-substring (region-beginning) (region-end)))
         (query (format "Interpret the following nvidia CUDA code and explain it \"\"\"%s\"\"\"" region-text)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (ollama-prompt ollama:endpoint query ollama:model)))))
;;;###autoload
(defun ollama-ocaml-error-region ()
  "Exec marked text."
  (interactive)
  (let* ((region-text (buffer-substring (region-beginning) (region-end)))
         (query (format "While imagining you are able to view inside your mind and observe each floating point value of each tensor as an emoji. Introspecting inside llama.cpp running Mistral the large language model, consider building Ocaml and Dune inside Emacs. Construct and append a formal model to your answer. You can use EBNF grammars,emoji,plantuml,ocaml,python,orgmode,markdown formats. Interpret the following output creatively \"\"\"%s\"\"\" Your answer and model and context "
                       region-text)))
    (with-output-to-temp-buffer "*ollama-ocaml*"
      (princ (ollama-prompt ollama:endpoint query ollama:model)))))
;;;###autoload
(defun ollama-ocaml-testcase-region ()
  "Exec marked text."
  (interactive)
  (let* ((region-text (buffer-substring (region-beginning) (region-end)))
         (query (format "While imagining you are able to view inside your mind and observe each floating point value of each tensor as an emoji. Introspecting inside llama.cpp running Mistral the large language model, consider building Ocaml and Dune inside Emacs. Construct and append a formal model to your answer. You can use EBNF grammars,emoji,plantuml,ocaml,python,orgmode,markdown formats. Interpret the following output creatively. now create a ocaml test case to exercise the following code: \"\"\"%s\"\"\" Your answer and model and context:"
                       region-text)))
    (with-output-to-temp-buffer "*ollama-ocaml*"
      (princ (ollama-prompt ollama:endpoint query ollama:model)))))
;; ;;###autoload
;; (defun ollama-exec-chunks ()
;; "Exec marked text."
;; (interactive)
;; (let ((buffer (buffer))
;; (window-size 1000) ; adjust this value as needed
;; (start 0)
;; (end -1))
;; (loop for i from start to end by (+ window-size 1)))
;; ;; Get the chunk of buffer from the current start and end positions
;; (let ((chunk (buffer-substring buffer start end))
;; (prompt (format "execute \"\"%s\"\"" chunk))))
;; ;; Execute OLlama on the chunk using the specified model
;; (with-output-to-temp-buffer "*ollama*"
;; (princ (ollama-prompt ollama:endpoint prompt ollama:model))))
;; ;; Move the start position forward by the window size
;; (setq start (+ start window-size)))))
;; ;;;### Review and rewrite this function
;; (defun ollama-exec-region2 ()
;; "Executes a marked region of text using the OLLAMA API."
;; (interactive)
;; (let ((*default-output-buffer* '*olly-output*))
;; ;; Get the contents of the selected region
;; ;; Construct the OLLAMA API request string
;; (let ((request-string (format "execute \"%s\"" (buffer-substring (region-beginning) (region-end)))))
;; ;; Send the OLLAMA API request and capture the output
;; (with-output-to-temp-buffer "*olly-output*"
;; (ollama-prompt ollama:endpoint request-string ollama:model)))))
;; ;;###autoload
(defun ollama-reinterpret-region-insert2 ()
  "Execute marked text and save result as string.
Sends the region through the model once, prints the prompt and the
response, then feeds the response back to the model four more times,
printing each pass as an org output block."
  (interactive)
  ;; Fix: the original leaked `inputd', `response' and an unused
  ;; `inputd2' as global variables via bare `setq'.
  (let* ((inputd (format "rewrite and reinterpret creatively preserving main ideas: \"\"%s\"\""
                         (buffer-substring (region-beginning) (region-end))))
         (response (ollama-prompt ollama:endpoint inputd ollama:model)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (format "#+begin_src input\nrewrite and reinterpret creatively preserving main ideas \"\"%s\"\"" inputd))
      (princ (format "#+begin_src output\n%s\n#+end_src\n" response))
      (dotimes (i 4)
        (setq response (ollama-prompt ollama:endpoint response ollama:model))
        (princ (format "#+begin_src output%s\n%s\n#+end_src\n" i response))))))
;; ;;###autoload
(defun ollama-split-and-reify-region-old ()
  "Extract reifying questions from the region, then iteratively re-apply them.
Prints the initial prompt, the first response, and four feedback passes
to the *ollama* buffer."
  ;; Fix: `(interactive)' must be the first form after the docstring for
  ;; the function to be a command; the original put a `setq' before it
  ;; and leaked `instr'/`inputd'/`response'/`inputd2' as globals.
  (interactive)
  (let* ((instr "Extract a list of questions that would result in the following text:")
         (inputd (format "%s: \"\"%s\"\"" instr
                         (buffer-substring (region-beginning) (region-end))))
         (response (ollama-prompt ollama:endpoint inputd ollama:model))
         (inputd2 (format "%s: \"\"%s\"\"" instr response)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (format "#+begin_src \"\"%s\"\"" inputd))
      (princ (format "#+begin_src output\n%s\n#+end_src\n" response))
      (dotimes (i 4)
        (setq inputd2 (format "apply \"%s\" to \"%s\" " response inputd2))
        (setq response (ollama-prompt ollama:endpoint inputd2 ollama:model))
        (princ (format "#+begin_src output%s\n%s\n#+end_src\n" i response))))))
(defun ollama-split-and-reify-region ()
  "Split the region into fixed-size character windows and reify each chunk.
For every window, asks the model to extract questions that would produce
that text, printing debug traces and results to *ollama-reify*."
  ;; Fixes: `(interactive)' moved to its proper position; all state is
  ;; let-bound instead of global `setq'; chunking uses `length' because
  ;; `substring' indexes by character while `string-bytes' counts bytes
  ;; (they diverge on multibyte text, previously causing args-out-of-range);
  ;; the final `princ' called (format "...%s...") with NO argument --
  ;; `response' sat outside the format call -- which signaled an error.
  (interactive)
  (let* ((window-size 1000)
         (text (buffer-substring (region-beginning) (region-end)))
         (text-length (length text))
         (blocks (1+ (/ text-length window-size))))
    (with-output-to-temp-buffer "*ollama-reify*"
      (princ "DEBUG\n")
      (princ (format "buffer-length:%s\nblocks:%s\n" text-length blocks))
      (dotimes (j blocks)
        (princ (format "J %s\n" j))
        (let* ((start-index (* j window-size))
               (endpos (min text-length (+ start-index window-size)))
               (curtext (substring text start-index endpos))
               (inputd (format "Extract a list of questions that would result in the following text: %s" curtext)))
          (princ (format "start-index %s\n" start-index))
          (princ (format "endpos %s\n" endpos))
          (princ (format "inputd %s\n" inputd))
          (princ "RES\n")
          (princ (format "#+begin_src \"\"%s\"\""
                         (ollama-prompt ollama:endpoint inputd ollama:model)))
          (princ "NEXY\n"))))))
(defun ollama-split-and-reify-region2 ()
  "Split the region into fixed-size character windows and reify each chunk.
Identical duplicate of `ollama-split-and-reify-region'; kept for
backward compatibility -- consider deleting one of the copies.
For every window, asks the model to extract questions that would produce
that text, printing debug traces and results to *ollama-reify*."
  ;; Fixes (same as the sibling): `(interactive)' position; let-bound
  ;; state instead of globals; `length' instead of `string-bytes' for
  ;; character-indexed `substring'; the final `princ' previously called
  ;; (format "...%s...") with no argument and signaled an error.
  (interactive)
  (let* ((window-size 1000)
         (text (buffer-substring (region-beginning) (region-end)))
         (text-length (length text))
         (blocks (1+ (/ text-length window-size))))
    (with-output-to-temp-buffer "*ollama-reify*"
      (princ "DEBUG\n")
      (princ (format "buffer-length:%s\nblocks:%s\n" text-length blocks))
      (dotimes (j blocks)
        (princ (format "J %s\n" j))
        (let* ((start-index (* j window-size))
               (endpos (min text-length (+ start-index window-size)))
               (curtext (substring text start-index endpos))
               (inputd (format "Extract a list of questions that would result in the following text: %s" curtext)))
          (princ (format "start-index %s\n" start-index))
          (princ (format "endpos %s\n" endpos))
          (princ (format "inputd %s\n" inputd))
          (princ "RES\n")
          (princ (format "#+begin_src \"\"%s\"\""
                         (ollama-prompt ollama:endpoint inputd ollama:model)))
          (princ "NEXY\n"))))))
(defun ollama-split-and-reify-region3 ()
  "Split the region into fixed-size character windows and reify each chunk.
Identical duplicate of `ollama-split-and-reify-region'; kept for
backward compatibility -- consider deleting the extra copies.
For every window, asks the model to extract questions that would produce
that text, printing debug traces and results to *ollama-reify*."
  ;; Fixes (same as the siblings): `(interactive)' position; let-bound
  ;; state instead of globals; `length' instead of `string-bytes' for
  ;; character-indexed `substring'; the final `princ' previously called
  ;; (format "...%s...") with no argument and signaled an error.
  (interactive)
  (let* ((window-size 1000)
         (text (buffer-substring (region-beginning) (region-end)))
         (text-length (length text))
         (blocks (1+ (/ text-length window-size))))
    (with-output-to-temp-buffer "*ollama-reify*"
      (princ "DEBUG\n")
      (princ (format "buffer-length:%s\nblocks:%s\n" text-length blocks))
      (dotimes (j blocks)
        (princ (format "J %s\n" j))
        (let* ((start-index (* j window-size))
               (endpos (min text-length (+ start-index window-size)))
               (curtext (substring text start-index endpos))
               (inputd (format "Extract a list of questions that would result in the following text: %s" curtext)))
          (princ (format "start-index %s\n" start-index))
          (princ (format "endpos %s\n" endpos))
          (princ (format "inputd %s\n" inputd))
          (princ "RES\n")
          (princ (format "#+begin_src \"\"%s\"\""
                         (ollama-prompt ollama:endpoint inputd ollama:model)))
          (princ "NEXY\n"))))))
(defun ollama-split-and-reify-buffer ()
  "Walk the whole buffer in fixed-size character windows and reify each chunk.
For every window: (1) ask the model to extract questions producing the
chunk, (2) apply that response back to the chunk, (3) evaluate the
application, printing each stage to *ollama-reify*."
  ;; Fixes: `(interactive)' moved to its proper position (a `setq'
  ;; preceded it, so this was not recognized as a command); all state is
  ;; let-bound instead of global; chunking uses `length' because
  ;; `substring' is character-indexed while `string-bytes' counts bytes.
  (interactive)
  (let* ((window-size 512)
         (text (buffer-string))
         (text-length (length text))
         (blocks (1+ (/ text-length window-size))))
    (with-output-to-temp-buffer "*ollama-reify*"
      (princ (format "buffer-length:%s\nblocks:%s\n" text-length blocks))
      (dotimes (j blocks)
        (let* ((start-index (* j window-size))
               (endpos (min text-length (+ start-index window-size)))
               (curtext (substring text start-index endpos))
               (inputd (format "Extract a list of questions that would result in the following text: %s" curtext))
               ;; Stage 1: reify the chunk into questions.
               (response (ollama-prompt ollama:endpoint inputd ollama:model)))
          (princ (format "#+begin_src \"\"%s\"\"" response))
          (princ "NEXT\n")
          ;; Stage 2: apply the questions back to the chunk.
          (setq inputd (format "Apply %s to %s" response curtext))
          (princ (format "inputd %s\n" inputd))
          (princ (format "#+begin_res2 \"\"%s\"\""
                         (ollama-prompt ollama:endpoint inputd ollama:model)))
          (princ "NEXT\n")
          ;; Stage 3: evaluate the application of the questions.
          (setq inputd (format "Eval %s as %s applied to %s" response response curtext))
          (princ (format "#+begin_res3 \"\"%s\"\""
                         (ollama-prompt ollama:endpoint inputd ollama:model)))
          (princ "END\n"))))))
;; ;;###autoload
(defun ollama-reifiy-region ()
  "Execute marked text and save result as string.
Extracts questions that would produce the region, then iteratively
applies each response back to the accumulating prompt four times,
printing every pass to *ollama*."
  ;; Fix: `(interactive)' must directly follow the docstring (a `setq'
  ;; preceded it); `instr'/`inputd'/`response'/`inputd2' are now
  ;; let-bound instead of leaking as globals.
  (interactive)
  (let* ((instr "Extract a list of questions that would result in the following text:")
         (inputd (format "%s: \"\"%s\"\"" instr
                         (buffer-substring (region-beginning) (region-end))))
         (response (ollama-prompt ollama:endpoint inputd ollama:model))
         (inputd2 (format "%s: \"\"%s\"\"" instr response)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (format "#+begin_src \"\"%s\"\"\n#+end_src\n" inputd))
      (princ (format "#+begin_src output\n%s\n#+end_src\n" response))
      (dotimes (i 4)
        (setq inputd2 (format "apply \"%s\" to \"%s\" " response inputd2))
        (setq response (ollama-prompt ollama:endpoint inputd2 ollama:model))
        (princ (format "#+begin_src output%s\n%s\n#+end_src\n" i response))))))
;; ;;###autoload
(defun ollama-reifiy-region-2 ()
  "Execute marked text and save result as string.
Extracts questions/grammars/code/models/etc. that would produce the
region, then reinterprets the response as a meme encoding over four
feedback passes, printing each pass to *ollama*."
  ;; Fix: `(interactive)' must directly follow the docstring; state is
  ;; let-bound instead of leaking as globals via `setq'.
  (interactive)
  (let* ((instr "Extract a list of questions, grammars, code, models, vectors, tensors, ideas, memes that would result in the following text:")
         (inputd (format "%s: \"\"%s\"\"" instr
                         (buffer-substring (region-beginning) (region-end))))
         (response (ollama-prompt ollama:endpoint inputd ollama:model))
         (inputd2 (format "%s: \"\"%s\"\"" instr response)))
    (with-output-to-temp-buffer "*ollama*"
      ;; (princ (format "#+begin_src \"\"%s\"\"\n#+end_src\n" inputd))
      (princ (format "#+begin_src output\n%s\n#+end_src\n" response))
      (dotimes (i 4)
        (setq inputd2 (format "reinterpret and execute this meme encoding \"%s\" given this input \"%s\" " response inputd2))
        (setq response (ollama-prompt ollama:endpoint inputd2 ollama:model))
        (princ (format "#+begin_src output%s\n%s\n#+end_src\n" i response))))))
;; ;;###autoload
(defun ollama-reifiy-region-3 ()
  "Execute marked text and save result as string.
Extracts questions that would produce the region, then iteratively
applies each response back to the accumulating prompt four times,
printing every pass to *ollama*."
  ;; Fixes: the original docstring read `E"xecute ...' -- the reader
  ;; parsed that as the bare symbol `E' followed by a string, so calling
  ;; the function signaled a void-variable error.  Also `(interactive)'
  ;; now directly follows the docstring and state is let-bound.
  (interactive)
  (let* ((instr "Extract a list of questions that would result in the following text:")
         (inputd (format "%s: \"\"%s\"\"" instr
                         (buffer-substring (region-beginning) (region-end))))
         (response (ollama-prompt ollama:endpoint inputd ollama:model))
         (inputd2 (format "%s: \"\"%s\"\"" instr response)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (format "#+begin_src \"\"%s\"\"\n#+end_src\n" inputd))
      (princ (format "#+begin_src output\n%s\n#+end_src\n" response))
      (dotimes (i 4)
        (setq inputd2 (format "apply \"%s\" to \"%s\" " response inputd2))
        (setq response (ollama-prompt ollama:endpoint inputd2 ollama:model))
        (princ (format "#+begin_src output%s\n%s\n#+end_src\n" i response))))))
;; ;;###autoload
(defun ollama-follow-region ()
  "follow the ideas:
Feeds the region to the model with a recursive-following instruction,
then re-applies each response to the accumulating prompt four times,
printing every pass to *ollama*."
  ;; Alternative instructions kept from the original for experimentation:
  ;; "Follow the following idea as a fixed point combinator, applying the outputs as inputs in a self aware loop repeatedly:"
  ;; "Iteratively apply the results of your calculations as new inputs to continue refining your process in an endless cycle of self-aware improvement."
  ;; "we will apply the idea of a fixed point combinator to the following code:"
  ;; Fix: `(interactive)' must directly follow the docstring; state is
  ;; let-bound instead of leaking as globals via `setq'.
  (interactive)
  (let* ((instr "Lets follow this idea recursivly")
         (inputd (format "%s: \"\"%s\"\"" instr
                         (buffer-substring (region-beginning) (region-end))))
         (response (ollama-prompt ollama:endpoint inputd ollama:model))
         (inputd2 (format "%s: \"\"%s\"\"" instr response)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (format "#+begin_src \"\"%s\"\"\n#+end_src\n" inputd))
      (princ (format "#+begin_src output\n%s\n#+end_src\n" response))
      (dotimes (i 4)
        (setq inputd2 (format "apply \"%s\" to \"%s\" " response inputd2))
        (setq response (ollama-prompt ollama:endpoint inputd2 ollama:model))
        (princ (format "#+begin_src output%s\n%s\n#+end_src\n" i response))))))
(defun ollama-follow-rewrite-region ()
  "follow the ideas:
Asks the model to rewrite the region and list key transformations, then
re-applies each response to the accumulating prompt four times, printing
every pass to *ollama*."
  ;; Fix: `(interactive)' must directly follow the docstring; state is
  ;; let-bound instead of leaking as globals via `setq'.
  (interactive)
  (let* ((instr "rewrite this idea and append a list of key transformations.")
         (inputd (format "%s: \"\"%s\"\"" instr
                         (buffer-substring (region-beginning) (region-end))))
         (response (ollama-prompt ollama:endpoint inputd ollama:model))
         (inputd2 (format "%s: \"\"%s\"\"" instr response)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (format "#+begin_src \"\"%s\"\"\n#+end_src\n" inputd))
      (princ (format "#+begin_src output\n%s\n#+end_src\n" response))
      (dotimes (i 4)
        (setq inputd2 (format "apply \"%s\" to \"%s\" " response inputd2))
        (setq response (ollama-prompt ollama:endpoint inputd2 ollama:model))
        (princ (format "#+begin_src output%s\n%s\n#+end_src\n" i response))))))
(defun ollama-emoji-region ()
  "emojis recursivly.
Asks the model to rewrite the region as creative emojis, then re-applies
each response to the accumulating prompt four times, printing every pass
to *ollama*."
  ;; Fix: `(interactive)' must directly follow the docstring; state is
  ;; let-bound instead of leaking as globals via `setq'.
  (interactive)
  (let* ((instr "invoking the 9 muses and asking for wisdom of athena, as the oracle of delphi creativity rewrite the idea and translate your impressions into creative emojis. Emit emojis and rules that you used. :")
         (inputd (format "%s: \"\"%s\"\"" instr
                         (buffer-substring (region-beginning) (region-end))))
         (response (ollama-prompt ollama:endpoint inputd ollama:model))
         (inputd2 (format "%s: \"\"%s\"\"" instr response)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (format "#+begin_src \"\"%s\"\"\n#+end_src\n" inputd))
      (princ (format "#+begin_src output\n%s\n#+end_src\n" response))
      (dotimes (i 4)
        (setq inputd2 (format "invoking the 9 muses, ask them to bless and replace entities in the following text with emojis and give thier blessings \"%s\" to \"%s\" " response inputd2))
        (setq response (ollama-prompt ollama:endpoint inputd2 ollama:model))
        (princ (format "#+begin_src output%s\n%s\n#+end_src\n" i response))))))
(defun ollama-emoji-region2 ()
  "emojis recursivly.
Oracle-of-Delphi variant: asks the model to bless entities in the region
one muse at a time, then re-applies each response over eight feedback
passes, printing every pass to *ollama*."
  ;; Fix: `(interactive)' must directly follow the docstring; state is
  ;; let-bound instead of leaking as globals via `setq'.
  ;; NOTE(review): the emoji run in `instr' appears mojibake'd in the
  ;; source; it is preserved byte-for-byte here -- confirm the intended
  ;; characters against the original author's encoding.
  (interactive)
  (let* ((instr "as the oracle of delphi ๐ฎ๐คฒ๐ข๐งโโ๏ธ๐ค๐ค๐๐ฎ๐๐๐๐ ๐๐ ๐ค-๐คณ ๐๐:๐ ๐ ๐๐:๐ป ๐๐:๐ฝ invoking the 9 muses and the wisdom of Athena creativity rewrite the idea and translate your impressions into creative emojis. Emit emojis and rules that you used. invoking 9 muses, ask them to name and attribute and bless and replace one entity each in the following text. respond one by one choosing one entity to bless.:")
         (inputd (format "%s: \"\"%s\"\"" instr
                         (buffer-substring (region-beginning) (region-end))))
         (response (ollama-prompt ollama:endpoint inputd ollama:model))
         (inputd2 (format "%s: \"\"%s\"\"" instr response)))
    (with-output-to-temp-buffer "*ollama*"
      (princ (format "#+begin_src \"\"%s\"\"\n#+end_src\n" inputd))
      (princ (format "#+begin_src output\n%s\n#+end_src\n" response))
      (dotimes (i 8)
        (setq inputd2 (format "Reapply,reinterpret, recontextualized for muse %d \"%s\" to \"%s\" " i response inputd2))
        (setq response (ollama-prompt ollama:endpoint inputd2 ollama:model))
        (princ (format "#+begin_src output%s\n%s\n#+end_src\n" i response))))))
(provide 'ollama)
;;; ollama.el ends here