From 0787d44add9d60bf0e28e61043d7656641e6d06e Mon Sep 17 00:00:00 2001 From: adnan wahab Date: Tue, 22 Oct 2024 16:50:48 -0500 Subject: [PATCH] cool --- 200k_prs/readme.md | 176 +++++++++++++++- package.json | 1 + scripts/_bootstrap.sh | 12 ++ scripts/code_base_indexing_for_prediction.ts | 198 ++++++++++++++++++ scripts/homelab_status_page.sh | 4 +- scripts/zoox_comprehension.py | 1 - web-ui/js/helpers/zig-helper.js | 1 - web-ui/my-app/src/index.ts | 28 +++ web-ui/my-app/src/llama-tools.jsx | 134 ++++++------ .../my-app/src/llama-tools/livekit_audio.html | 185 ++++++++-------- .../my-app/src/llama-tools/livekit_share.html | 42 ++-- 11 files changed, 582 insertions(+), 200 deletions(-) create mode 100644 package.json create mode 100755 scripts/code_base_indexing_for_prediction.ts delete mode 100755 scripts/zoox_comprehension.py diff --git a/200k_prs/readme.md b/200k_prs/readme.md index 0f98d090..11a67ce3 100644 --- a/200k_prs/readme.md +++ b/200k_prs/readme.md @@ -1,4 +1,174 @@ -dustynv/jetson-containers -observablheq/runtime -livekit/client-sdk-js + +https://github.com/dustynv/jetson-containers +https://github.com/observablheq/runtime +https://github.com/livekit/client-sdk-js +https://github.com/bazelbuild/bazel + + + +[ + { + "name": "GoLang", + "repo": "https://github.com/golang/go", + "dependencies": [ + { + "name": "gc (Go compiler)", + "description": "Go compiler toolchain" + }, + { + "name": "gccgo", + "description": "Alternative Go compiler based on GCC" + }, + { + "name": "cmd/compile", + "description": "Go compiler internal package with SSA and IR components" + }, + { + "name": "CGO", + "description": "Allows Go programs to call C code" + }, + { + "name": "Go Runtime", + "description": "Includes garbage collection, goroutines, and channels" + } + ] + }, + { + "name": "Node.js", + "repo": "https://github.com/nodejs/node", + "dependencies": [ + { + "name": "V8 Engine", + "description": "JavaScript engine by Google" + }, + { + "name": "libuv", + "description": "Provides event loop and asynchronous I/O" + }, + { + "name": "c-ares", + "description": "A C library for asynchronous DNS requests" + }, + { + "name": "OpenSSL", + "description": "Handles TLS and cryptographic operations" + }, + { + "name": "HTTP Parser", + "description": "Parses HTTP requests and responses" + }, + { + "name": "zlib", + "description": "Handles data compression" + } + ] + }, + { + "name": "Docker", + "repo": "https://github.com/moby/moby", + "dependencies": [ + { + "name": "runc", + "description": "CLI tool for running containers" + }, + { + "name": "containerd", + "description": "Manages container lifecycle" + }, + { + "name": "libnetwork", + "description": "Provides container networking" + }, + { + "name": "BuildKit", + "description": "Optimizes Docker build process" + }, + { + "name": "aufs and overlayfs", + "description": "Filesystems for container storage" + } + ] + }, + { + "name": "Kubernetes", + "repo": "https://github.com/kubernetes/kubernetes", + "dependencies": [ + { + "name": "etcd", + "description": "Distributed key-value store for cluster data" + }, + { + "name": "containerd", + "description": "Manages containers in Kubernetes" + }, + { + "name": "CoreDNS", + "description": "DNS for Kubernetes services" + }, + { + "name": "cAdvisor", + "description": "Monitors container resource usage" + }, + { + "name": "Flannel or Calico", + "description": "Networking plugins for pod networking" + } + ] + }, + { + "name": "Slurm", + "repo": "https://github.com/SchedMD/slurm", + "dependencies": [ + { + "name": 
"MySQL or MariaDB", + "description": "Database for job and cluster data" + }, + { + "name": "PMIx", + "description": "Process management standard" + }, + { + "name": "Munge", + "description": "Authentication system for users and hosts" + }, + { + "name": "hwloc", + "description": "Manages hardware topology" + } + ] + }, + { + "name": "Java (OpenJDK)", + "repo": "https://github.com/openjdk/jdk", + "dependencies": [ + { + "name": "JVM", + "description": "Java Virtual Machine for running Java applications" + }, + { + "name": "javac", + "description": "Java compiler" + }, + { + "name": "JRE libraries", + "description": "Standard libraries for utilities, networking, and I/O" + }, + { + "name": "HotSpot", + "description": "Default JVM for memory management and garbage collection" + } + ] + } +] +https://github.com/scverse/scanpy + + +pytorch or tinygrad + +//this is the list of deps to sort by prtioity +{ +"pytorch": [], +"tinygrad": [], + +} diff --git a/package.json b/package.json new file mode 100644 index 00000000..ffbbe38d --- /dev/null +++ b/package.json @@ -0,0 +1 @@ +{ "dependencies": { "ollama": "^0.5.9" } } \ No newline at end of file diff --git a/scripts/_bootstrap.sh b/scripts/_bootstrap.sh index b62a5517..8d326152 100755 --- a/scripts/_bootstrap.sh +++ b/scripts/_bootstrap.sh @@ -1,6 +1,18 @@ op item list --vault=personal --tags=api --format json + + +alias pull="git pull" +alias commit="git add . && git commit -am 'simplify' && git push origin main" +alias status="git status" + +# Adding alias to bashrc for best practices +# echo "alias ll='ls -l'" >> ~/.bashrc +# echo "alias la='ls -a'" >> ~/.bashrc +# echo "alias ll='ls -l'" >> ~/.bashrc +# echo "alias grep='grep --color=auto'" >> ~/.bashrc + # Symlink each category into a data folder in the root of this git repo #chmod +x scripts/* # ln -s ~/derp/actions ~/homelab_status_page/data/actions diff --git a/scripts/code_base_indexing_for_prediction.ts b/scripts/code_base_indexing_for_prediction.ts new file mode 100755 index 00000000..b2a2cbeb --- /dev/null +++ b/scripts/code_base_indexing_for_prediction.ts @@ -0,0 +1,198 @@ +//# zoox is a list of modules - make a comprehension to map filter into a json +import { Ollama } from 'ollama' + + +//ffi -- - learn p -> js + +const read_location = `/home/adnan/derp/ro/mini_zoox/` +import { fstat } from 'fs'; +// Import necessary modules +import { readdir, stat, readFile, mkdir } from 'fs/promises'; +import path from 'path'; + +// Configuration +const ROOT_DIR = read_location +const OUTPUT_DIR = '/home/adnan/derp/ro'; + +// Utility function to get all files recursively +async function getAllFiles(dirPath, arrayOfFiles) { + const files = await readdir(dirPath); + + for (const file of files) { + const fullPath = path.join(dirPath, file); + const fileStat = await stat(fullPath); + + if (fileStat.isDirectory()) { + await getAllFiles(fullPath, arrayOfFiles); + } else { + arrayOfFiles.push(fullPath); + } + } + + return arrayOfFiles; +} + +// Task 1: Generate stats about all files +async function generateFileStats(files) { + const fileStats = []; + + for (const file of files) { + const fileContent = await readFile(file, 'utf-8'); + const fileStat = await stat(file); + const lines = fileContent.split('\n').length; + + fileStats.push({ + path: file, + size: fileStat.size, + lines: lines, + extension: path.extname(file), + modifiedDate: fileStat.mtime, + }); + } + + return fileStats; +} + +// Task 2: Find dependencies of each file +async function findDependencies(files) { + const dependencies = {}; + + 
for (const file of files) { + const fileContent = await readFile(file, 'utf-8'); + const importRegex = /import .* from ['"](.*)['"];?/g; + const requireRegex = /require\(['"](.*)['"]\)/g; + const deps = []; + let match; + + while ((match = importRegex.exec(fileContent)) !== null) { + deps.push(match[1]); + } + + while ((match = requireRegex.exec(fileContent)) !== null) { + deps.push(match[1]); + } + + dependencies[file] = deps; + } + + return dependencies; +} + +// Task 3: Extract all notes (comments) +async function extractComments(files) { + const comments = {}; + + for (const file of files) { + const fileContent = await readFile(file, 'utf-8'); + const commentRegex = /\/\/(.*)|\/\*([\s\S]*?)\*\//g; + const fileComments = []; + let match; + + while ((match = commentRegex.exec(fileContent)) !== null) { + fileComments.push(match[1] || match[2]); + } + + comments[file] = fileComments; + } + + return comments; +} + +// Task 4: Count how many files depend on each file +function countReverseDependencies(dependencies) { + const reverseDeps = {}; + + for (const [file, deps] of Object.entries(dependencies)) { + for (const dep of deps) { + if (!reverseDeps[dep]) { + reverseDeps[dep] = []; + } + reverseDeps[dep].push(file); + } + } + + const dependencyCount = {}; + + for (const [file, dependents] of Object.entries(reverseDeps)) { + dependencyCount[file] = dependents.length; + } + + return dependencyCount; +} + + + +async function code_base_indexing_for_prediction() { + // Ensure the output directory exists + await mkdir(OUTPUT_DIR, { recursive: true }); // Create the directory if it doesn't exist + + // Step 1: Get all files + const allFiles = await getAllFiles(ROOT_DIR, []); + + // Task 1 + const stats = await generateFileStats(allFiles); + await Bun.write(`${OUTPUT_DIR}/fileStats.json`, JSON.stringify(stats, null, 2)); + + // Task 2 + const dependencies = await findDependencies(allFiles); + await Bun.write(`${OUTPUT_DIR}/dependencies.json`, JSON.stringify(dependencies, null, 2)); + + // Task 3 + const notes = await extractComments(allFiles); + await Bun.write(`${OUTPUT_DIR}/notes.json`, JSON.stringify(notes, null, 2)); + + // Task 4 + const reverseDependencyCount = countReverseDependencies(dependencies); + await Bun.write(`${OUTPUT_DIR}/reverseDependencies.json`, JSON.stringify(reverseDependencyCount, null, 2)); + + console.log('All tasks completed successfully!'); +} + + +// Execute the main function + + +// code_base_indexing_for_prediction().catch((error) => { +// console.error('An error occurred:', error); +// }); + + +// Main function to run all tasks +let view_location = `https://files.hashirama.blog/derp/intermediate_representation/micro_zoox/index.json` + + + + + +async function robot_lamma () { + + const stats = await Bun.file(`${OUTPUT_DIR}/fileStats.json`).json(); + const dependencies = await Bun.file(`${OUTPUT_DIR}/dependencies.json`).json(); + const notes = await Bun.file(`${OUTPUT_DIR}/notes.json`).json(); + + const file_names = Object.keys(dependencies); + + console.log(file_names.length) + + + const ollama = new Ollama({ host: 'http://127.0.0.1:11434' }) + + //ollama_client + const message = { + role: "user", + content: "what does this file mean?" 
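    // Note: in the ollama JS client, chat() reads the prompt from `messages`;
    // the `input` field passed to chat() below is not a documented option, so the
    // file content likely needs to be appended to this message's `content` instead.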
+ } + file_names.slice(0, 20).map(async (file_name) => { + const file_content = await Bun.file(file_name).text() + const response = await ollama.chat({ + model: 'llama3.2', + input: file_content, + messages: [message] + }) + }) + + //const reverseDependencyCount = await Bun.file(`${OUTPUT_DIR}/reverseDependencies.json`).json(); -- too long +} +console.time("code_base_indexing_for_prediction"); +robot_lamma() +console.timeEnd("code_base_indexing_for_prediction") diff --git a/scripts/homelab_status_page.sh b/scripts/homelab_status_page.sh index e6b835d9..be93bf3e 100755 --- a/scripts/homelab_status_page.sh +++ b/scripts/homelab_status_page.sh @@ -15,9 +15,9 @@ if [ "$COMMAND" = "kentbeck" ] || [ "$COMMAND" = "build.sh" ]; then echo "Running run.sh..." bun run ~/homelab_status_page/web-ui/js/helpers/Kent_Beck_robusteness.js echo "Running run.sh..." -elif [ "$COMMAND" = "run" ] || [ "$COMMAND" = "run.sh" ]; then +elif [ "$COMMAND" = "ro" ] || [ "$COMMAND" = "run.sh" ]; then #$ROOT/run.sh "$@" - echo "Running run.sh..." + bun run ~/homelab_status_page/scripts/code_base_indexing_for_prediction.ts elif [ "$COMMAND" = "list" ]; then # $ROOT/build.sh --list "$@" echo "Running run.sh..." diff --git a/scripts/zoox_comprehension.py b/scripts/zoox_comprehension.py deleted file mode 100755 index cbbc1859..00000000 --- a/scripts/zoox_comprehension.py +++ /dev/null @@ -1 +0,0 @@ -# zoox is a list of modules - make a comprehension to map filter into a json \ No newline at end of file diff --git a/web-ui/js/helpers/zig-helper.js b/web-ui/js/helpers/zig-helper.js index 36691109..e69de29b 100644 --- a/web-ui/js/helpers/zig-helper.js +++ b/web-ui/js/helpers/zig-helper.js @@ -1 +0,0 @@ -//ffi -- - learn pytorch or tinygrad -> js \ No newline at end of file diff --git a/web-ui/my-app/src/index.ts b/web-ui/my-app/src/index.ts index 9389b444..d208aa6f 100644 --- a/web-ui/my-app/src/index.ts +++ b/web-ui/my-app/src/index.ts @@ -61,6 +61,34 @@ app.all('/iframe/*', (c) => { //let html = 'yay' return c.html(html) }) + + + +import { WebhookReceiver } from 'livekit-server-sdk'; + +const receiver = new WebhookReceiver('apikey', 'apisecret'); + +// In order to use the validator, WebhookReceiver must have access to the raw +// POSTed string (instead of a parsed JSON object). If you are using express +// middleware, ensure that `express.raw` is used for the webhook endpoint +// app.use(express.raw({type: 'application/webhook+json'})); + +app.post('/webhook-endpoint', async (c) => { + // Event is a WebhookEvent object + const event = await receiver.receive(await c.req.raw(), c.req.get('Authorization')); + + + const payload = await c.req.json(); // Get JSON payload from request body + console.log('Webhook received:', payload); + + // Process the webhook data here... + + console.log('event', event, payload, c.req.get('Authorization')) + + return c.json({ success: true }); +}); + + //after 1k signups - lower price to course by 10% by 1k till $5 for life. //only need 20k per year - (20k / 100) = (goal: 200per year) - rest -> reinveest to hardware //1 buy per day = all beings (awaken + flourish) diff --git a/web-ui/my-app/src/llama-tools.jsx b/web-ui/my-app/src/llama-tools.jsx index b9a1679a..5106433f 100644 --- a/web-ui/my-app/src/llama-tools.jsx +++ b/web-ui/my-app/src/llama-tools.jsx @@ -1,85 +1,77 @@ +import { useState } from 'react'; + export default function Example() { - return ( -
          {/* LLama Tools panel: "Deploy faster" badge and "LLama Tools" heading (remaining markup omitted) */}
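Note on the webhook handler added to web-ui/my-app/src/index.ts above: LiveKit's WebhookReceiver validates the raw POSTed body string, so in Hono the safer pattern is to read the body with c.req.text() and the header with c.req.header('Authorization') before handing both to receive(). Below is a minimal sketch, assuming Hono and livekit-server-sdk as used in the diff; the 'apikey'/'apisecret' values and the /webhook-endpoint path mirror the placeholders already in the patch.

import { Hono } from 'hono';
import { WebhookReceiver } from 'livekit-server-sdk';

const app = new Hono();
// Placeholder credentials, as in the diff; real values would come from config or env.
const receiver = new WebhookReceiver('apikey', 'apisecret');

app.post('/webhook-endpoint', async (c) => {
  // The receiver needs the raw body string to verify the signature,
  // so read the text first; parse JSON afterwards only if needed.
  const rawBody = await c.req.text();
  const authHeader = c.req.header('Authorization');

  const event = await receiver.receive(rawBody, authHeader);
  console.log('Webhook event received:', event.event);

  return c.json({ success: true });
});

export default app;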