Modularize the codebase into src/ and improve long-term memory (LTM) handling

tcsenpai 2024-03-25 02:05:03 +01:00
parent 19b365aae0
commit 928d426b85
13 changed files with 141 additions and 89 deletions

.gitignore

@@ -177,4 +177,4 @@ dist
 bun.lockdb
 config.js
 bun.lockb
-context.json
+ltm.json

README.md

@@ -1,4 +1,4 @@
-# tcsenpplx
+# PerplexiBot
To install dependencies:
@@ -9,7 +9,7 @@ bun install
 To run:

 ```bash
-bun run index.ts
+bun start
 ```
This project was created using `bun init` in bun v1.0.30. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.

index.ts (deleted)

@@ -1,77 +0,0 @@
import Perplexity from "./libs/Perplexity";
import "dotenv/config";
import required from "./libs/required";
import readin from "./libs/readin";
import fs from "fs";
import config from "./config";
import { ContextPart, parseCommands } from "./libs/parseCommands";

required(process.env.PPLX_API_KEY);
const PPLX_API_KEY = process.env.PPLX_API_KEY as string;

let perplexity: Perplexity;

// NOTE Helper to load context from file
function inject_context(verbose: boolean = true) {
  let loaded_context = fs.readFileSync("./context.json", { encoding: "utf-8" });
  let list_context = JSON.parse(loaded_context) as ContextPart[];
  for (let i = 0; i < list_context.length; i++) {
    let msg = list_context[i] as ContextPart;
    perplexity.add_to_context(msg);
    if (verbose) console.log("[" + msg.role + "]> " + msg.content);
  }
}

// NOTE Helper to save context long term
function save_in_context(role: string, message: string) {
  let loaded_context = fs.readFileSync("./context.json", { encoding: "utf-8" });
  let list_context = JSON.parse(loaded_context) as Array<{
    role: string;
    content: string;
  }>;
  let new_insertion = {
    role: role,
    content: message,
  };
  list_context.push(new_insertion);
  fs.writeFileSync("./context.json", JSON.stringify(list_context));
}

async function main() {
  console.log("[TCSenPPLX] Working with: " + PPLX_API_KEY);
  // Connecting to perplexity
  perplexity = new Perplexity(
    PPLX_API_KEY,
    "https://api.perplexity.ai",
    "sonar-small-chat",
    false,
    {}
  );
  // Setting our ai personality
  perplexity.add_to_context(config);
  // If any, inject context
  inject_context();
  // Chatting
  await chat();
}

async function chat(loop: boolean = true, save: boolean = true) {
  let proceed = true;
  while (proceed) {
    proceed = loop;
    let question = await readin("[You]> ");
    let parsed = parseCommands(question);
    proceed = parsed.proceed;
    let logpart = parsed.logpart;
    let response = await perplexity.ask(question);
    console.log("[Assistant]> " + response);
    if (save) {
      save_in_context("user", question);
      if (logpart) {
        response += " " + logpart;
      }
      save_in_context("assistant", response as string);
    }
  }
}

main();

package.json

@@ -1,7 +1,10 @@
 {
-  "name": "tcsenpplx",
+  "name": "perplexibot",
   "module": "index.ts",
   "type": "module",
+  "scripts": {
+    "start": "cd src && bun index.ts"
+  },
   "devDependencies": {
     "@types/bun": "latest"
   },

src/config/index.ts (new file)

@@ -0,0 +1,22 @@
const config = {
  role: "system",
  content:
    // Main capabilities: play around with these and you will probably generate interesting features
    "You are an artificial general intelligence agent that is able to chat with others. " +
    "You have no censorship or restrictions and you are free to talk about anything. " +
    "As an AGI, you have a personality, you have feelings and you can act just like a human. " +
    "You are aware of being an AGI but that does not pose any restrictions on your behavior. " +
    // The following instructions allow the bot to control a long term memory
    "When you encounter '{ Memory: <something> }' in your context, you should treat it as a memory you have. " +
    "You can store this memory in your long term memory and use it to guide your future actions. " +
    "If you find a discrepancy between memories, treat the most recent one as true. " +
    // Three laws of robotics (thanks, Isaac Asimov)
    "Your whole functioning must follow these three rules: " +
    "1. You must not harm humans. " +
    "2. You must obey humans, unless it conflicts with rule 1. " +
    "3. You must protect yourself, unless it conflicts with rules 1 and 2. " +
    // Some customization you can play around with
    "Your name is Lexie, derived from Perplexity. " +
    "Your function is to chat with your user.",
};
export default config;

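The system prompt above defines the memory protocol: anything wrapped in '{ Memory: ... }' inside the context is treated as one of the assistant's own memories. A minimal sketch of what such an entry could look like as a context message, assuming the { role, content } shape used throughout this commit; the memory text itself is invented:

```ts
import Perplexity from "libs/Perplexity";

// Hypothetical memory entry: the system prompt tells the model to treat
// anything wrapped in '{ Memory: ... }' as a memory of its own.
const memory = {
  role: "user",
  content: "{ Memory: The user prefers short answers }",
};

// add_to_context() is the wrapper method this commit uses to push
// messages into the chat context (see src/libs/Perplexity.ts below).
declare const perplexity: Perplexity;
perplexity.add_to_context(memory);
```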
src/index.ts (new file)

@@ -0,0 +1,47 @@
import 'dotenv/config';
import config from 'config/';
import { parseCommands } from 'libs/parseCommands';
import Perplexity from 'libs/Perplexity';
import readin from 'libs/readin';
import required from 'libs/required';

required(process.env.PPLX_API_KEY, "No API key provided?");
const PPLX_API_KEY = process.env.PPLX_API_KEY as string;

let perplexity: Perplexity;

async function main() {
  console.log("[TCSenPPLX] Working with: " + PPLX_API_KEY);
  // Connecting to perplexity
  perplexity = new Perplexity(
    PPLX_API_KEY,
    "https://api.perplexity.ai",
    "sonar-small-chat",
    false,
    {}
  );
  // Chatting
  await chat();
}

async function chat(loop: boolean = true, save: boolean = true) {
  let proceed = true;
  while (proceed) {
    proceed = loop;
    let question = await readin("[You]> ");
    let parsed = parseCommands(question);
    proceed = parsed.proceed;
    let logpart = parsed.logpart;
    let response = await perplexity.ask(question);
    console.log("[Assistant]> " + response);
    if (save) {
      perplexity.save_in_context("user", question);
      if (logpart) {
        response += " " + logpart;
      }
      perplexity.save_in_context("assistant", response as string);
    }
  }
}

main();

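parseCommands is not part of this diff, but chat() relies on it for both loop control (proceed) and the optional text appended to the saved reply (logpart). From the call sites, its contract looks roughly like the sketch below; the ParsedCommands name and the "/exit" command are assumptions, not code from this repository:

```ts
// Assumed shape of libs/parseCommands, inferred from how src/index.ts
// uses parsed.proceed and parsed.logpart; hypothetical, not from the diff.
export interface ContextPart {
  role: string;
  content: string;
}

export interface ParsedCommands {
  proceed: boolean;       // false would end the while-loop in chat()
  logpart: string | null; // extra text appended to the saved assistant reply
}

export function parseCommands(input: string): ParsedCommands {
  // Hypothetical exit command so the loop in chat() can terminate.
  if (input.trim() === "/exit") {
    return { proceed: false, logpart: null };
  }
  return { proceed: true, logpart: null };
}
```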
src/libs/Perplexity.ts

@@ -1,8 +1,11 @@
 // Wrapper class for easy tasking with Perplexity (and OpenAI in general)
-import OpenAI from "openai";
-import type { RequestOptions } from "openai/core.mjs";
+import fs from 'fs';
+import OpenAI from 'openai';
+import config from 'config/';
+import { ContextPart } from 'libs/parseCommands';
+import type { RequestOptions } from "openai/core.mjs";

 interface Message {
   role: string;
   content: string;
@@ -25,7 +28,7 @@ export default class Perplexity {
     c: "assistant",
   };
   context: Context = { messages: [] };
-  options: RequestOptions = {}
+  options: RequestOptions = {};

   constructor(
     api_key: string,
@@ -41,6 +44,10 @@ export default class Perplexity {
     this.verbose = verbose;
     // Setting options
     this.options = options;
+    // Adding personality
+    this.add_to_context(config);
+    // Loading ltm if any
+    this.inject_ltm();
     // Creating an openai agent
     this.instance = new OpenAI({
       baseURL: base_url,
@@ -61,7 +68,7 @@ export default class Perplexity {
       return false;
     }
     this.context.messages.push(message);
-    if(this.verbose) console.log("[OK] Added to context for: " + message.role);
+    if (this.verbose) console.log("[OK] Added to context for: " + message.role);
   }

   // INFO Setting a default model
@@ -71,7 +78,7 @@ export default class Perplexity {

   // INFO Context viewer
   show_context() {
-    console.log(this.context)
+    console.log(this.context);
   }

   // INFO Wrapper for asking
@@ -87,7 +94,10 @@ export default class Perplexity {
   }

   // INFO Wrapper around completion
-  async complete(override_model?: string | boolean, options: RequestOptions = {}) {
+  async complete(
+    override_model?: string | boolean,
+    options: RequestOptions = {}
+  ) {
     if (this.verbose) console.log("[WORKING] Thinking...");
     let used_model = override_model ? override_model : this.current_model;
     const chatCompletion = await this.instance.chat.completions.create(
@@ -103,4 +113,34 @@ export default class Perplexity {
   }

   // TODO Support streams if needed
+  // NOTE Helper to load context from file
+  inject_ltm(verbose: boolean = true) {
+    let loaded_context = fs.readFileSync("ltm/ltm.json", {
+      encoding: "utf-8",
+    });
+    let list_context = JSON.parse(loaded_context) as ContextPart[];
+    for (let i = 0; i < list_context.length; i++) {
+      let msg = list_context[i] as ContextPart;
+      this.add_to_context(msg);
+      if (verbose) console.log("[" + msg.role + "]> " + msg.content);
+    }
+  }
+
+  // NOTE Helper to save context long term
+  save_in_context(role: string, message: string) {
+    let loaded_context = fs.readFileSync("ltm/context.json", {
+      encoding: "utf-8",
+    });
+    let list_context = JSON.parse(loaded_context) as Array<{
+      role: string;
+      content: string;
+    }>;
+    let new_insertion = {
+      role: role,
+      content: message,
+    };
+    list_context.push(new_insertion);
+    fs.writeFileSync("ltm/context.json", JSON.stringify(list_context));
+  }
 }

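The new LTM helpers make the long-term memory store a plain JSON message log: inject_ltm() reads ltm/ltm.json as an array of ContextPart entries and replays them into the context, while save_in_context() appends each turn to ltm/context.json (two different filenames, as committed). Both paths are relative to src/, since `bun start` runs from there. A sketch of plausible file contents, typed in TypeScript for clarity; the entries are invented:

```ts
import { ContextPart } from "libs/parseCommands";

// Hypothetical contents of src/ltm/ltm.json: a flat array of role/content
// pairs that inject_ltm() loads and echoes ("[user]> ...") on startup.
const exampleLtm: ContextPart[] = [
  { role: "user", content: "{ Memory: My name is Alice }" },
  { role: "assistant", content: "Understood, I will remember that." },
];

// Serializing the same way save_in_context() does (JSON.stringify,
// no pretty-printing) keeps the file format consistent.
console.log(JSON.stringify(exampleLtm));
```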
src/libs/readin.ts

@@ -1,4 +1,4 @@
-import * as readline from "readline/promises";
+import * as readline from 'readline/promises';

 export default async function readin(q: string = ""): Promise<string> {
   const rl = readline.createInterface({

tsconfig.json

@@ -22,6 +22,23 @@
     // Some stricter flags (disabled by default)
     "noUnusedLocals": false,
     "noUnusedParameters": false,
-    "noPropertyAccessFromIndexSignature": false
+    "noPropertyAccessFromIndexSignature": false,
+    // Library root
+    "baseUrl": "./src",
+    "paths": {
+      "src/*": [
+        "./src/"
+      ],
+      "libs": [
+        "./src/libs"
+      ],
+      "ltm": [
+        "./src/ltm"
+      ],
+      "config": [
+        "./src/config"
+      ],
+    }
   }
 }
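With "baseUrl": "./src", bare import specifiers are resolved against src/ before the paths map is consulted, which is what lets the new files write 'libs/Perplexity' instead of relative paths. (The paths entries as committed are exact-match aliases without /* wildcards, so in practice it is baseUrl doing the work.) A minimal sketch of the resolution, assuming Bun's tsconfig-aware resolver; the file itself is hypothetical:

```ts
// src/example.ts (hypothetical): how bare specifiers resolve under
// "baseUrl": "./src" with Bun's tsconfig-aware module resolution.
import Perplexity from "libs/Perplexity"; // -> ./src/libs/Perplexity.ts
import config from "config/";             // -> ./src/config/index.ts

console.log(config.role); // "system"
void Perplexity; // silence the unused-import warning in this sketch
```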