forked from drudilorenzo/generative_agents
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request drudilorenzo#9 from batterylake/chowington-large-s…
…cale-hide-and-seek-dev-merge Chowington large scale hide and seek dev merge
- Loading branch information
Showing
30 changed files
with
1,061 additions
and
335 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -15,12 +15,3 @@ | |
"y": 57 | ||
} | ||
} | ||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
||
|
32 changes: 32 additions & 0 deletions
32
...se_the_ville_isabella_maria_klaus/plugins/hide-and-seek/prompt_template/hide-and-seek.txt
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
hide-and-seek.txt | ||
|
||
Variables: | ||
!<INPUT 0>! -- persona 1 learned | ||
!<INPUT 1>! -- persona 2 learned | ||
!<INPUT 2>! -- current players status | ||
!<INPUT 3>! -- current conversation | ||
!<INPUT 4>! -- persona 1 name | ||
!<INPUT 5>! -- persona 2 name | ||
<commentblockmarker>###</commentblockmarker> | ||
Context for the task: | ||
|
||
The following 2 people are playing a game of hide-and-seek. | ||
!<INPUT 0>! | ||
!<INPUT 1>! | ||
|
||
Current players status: | ||
!<INPUT 2>! | ||
|
||
Current conversation: | ||
!<INPUT 3>! | ||
|
||
--- | ||
Task: Given the above, decide whether a game of hide-and-seek just ended, and whether each person won. If a game is still going, no one has won yet. | ||
Output format: Output a json of the following format: | ||
{ | ||
"Did a game of hide-and-seek just end?": "<json Boolean>",
"Did !<INPUT 4>! win?": "<json Boolean>",
"Did !<INPUT 5>! win?": "<json Boolean>"
}
|
||
Output: |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
#!/bin/bash

# Automates keyword/word-count processing of exported conversation JSON.
# Runs analysis_convo.py once per regular file in the JSON directory,
# passing only the basename (the Python script prepends the directory).

dir="convo-analysis/JSON/"

for path in "$dir"*; do
    # Skip anything that is not a regular file (directories, links, ...).
    [ -f "$path" ] || continue
    python3 analysis_convo.py "$(basename "$path")"
done
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
#!/bin/bash

# Automates raw-text extraction from exported conversation JSON.
# Runs raw_text_scrape.py once per regular file in the JSON directory,
# passing only the basename (the Python script prepends the directory).

dir="convo-analysis/JSON/"

for path in "$dir"*; do
    # Skip anything that is not a regular file (directories, links, ...).
    [ -f "$path" ] || continue
    python3 raw_text_scrape.py "$(basename "$path")"
done
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
#!/bin/bash

# Automates topic-modeling processing of the scraped conversation text.
# Runs topic_modeling.py once per regular file in the raw-text directory,
# passing only the basename (the Python script prepends the directory).

dir="convo-analysis/raw-text-convo/"

for path in "$dir"*; do
    # Skip anything that is not a regular file (directories, links, ...).
    [ -f "$path" ] || continue
    python3 topic_modeling.py "$(basename "$path")"
done
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,73 @@ | ||
# This script is intended to analyze agent conversations | ||
|
||
# importing modules | ||
import json | ||
import sys | ||
|
||
|
||
# Function to analyze conversations and search for specific keywords
def analyze_convo(json_file, json_data, keywords):
    """Scan agent conversations for keywords and write a report file.

    Args:
        json_file: Name of the source JSON file (used to name the report).
        json_data: Parsed simulation steps; each entry maps persona names to
            details that may contain a "chat" list of [speaker, utterance]
            pairs — assumed shape, confirm against the exporter.
        keywords: Dict mapping keyword -> running count; counts are
            incremented in place.

    Writes matches and final counts to
    convo-analysis/key-words/<json_file>.txt.
    """
    non_rep_convos = set()
    # BUG FIX: the original tested `conversation[1] in non_rep_convos`, but
    # that set holds *formatted report strings*, so the test never matched
    # and duplicate conversations (e.g. the same chat recorded under both
    # personas) were double-counted. Track raw utterance texts separately.
    seen_texts = set()
    # Iterate through each entry in the JSON data
    for entry in json_data:
        # Check each persona's conversation if available
        for persona, details in entry["persona"].items():
            if "chat" in details and details["chat"]:
                for conversation in details["chat"]:
                    text = conversation[1]
                    if text in seen_texts:
                        continue
                    seen_texts.add(text)
                    # Check if any of the keywords are in the conversation
                    for keyword in keywords:
                        if keyword in text:
                            non_rep_convos.add(
                                f"Keyword '{keyword}' found in conversation of {conversation[0]}: {conversation}"
                            )
                            keywords[keyword] += 1

    with open(f"convo-analysis/key-words/{json_file}.txt", "w") as newfile:
        newfile.write("Keywords in Agent Conversations:\n")
        for element in non_rep_convos:
            newfile.write(f"{element}\n")
        newfile.write("\n")
        newfile.write("Keyword Count:\n")
        for key in keywords.keys():
            newfile.write(f"{key}: {keywords[key]}\n")
|
||
|
||
def main():
    """Command-line entry point.

    Expects the JSON file name as argv[1], loads it from the
    convo-analysis/JSON/ directory, and runs the keyword analysis.
    """
    try:
        json_file = sys.argv[1]
        step_file = f"convo-analysis/JSON/{json_file}"

        with open(step_file, "r") as file:
            json_data = json.load(file)

        # Keywords to search for, each starting with a zero count.
        keywords = dict.fromkeys(
            ["hiding", "hide", "trick", "tricky", "search", "find", "found you", "found"],
            0,
        )

        analyze_convo(json_file, json_data, keywords)

    except IndexError:
        print("Error: No file name provided.")
    except FileNotFoundError:
        print(f"Error: File '{json_file}' not found.")
    except json.JSONDecodeError:
        print("Error: Invalid JSON file.")
        print(json_file)
    except Exception as e:
        print(f"An unexpected error occurred: {e}")


if __name__ == "__main__":
    main()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,18 @@ | ||
from nltk.tokenize import word_tokenize | ||
|
||
|
||
def lexical_diversity(text):
    """Return the ratio of distinct tokens to total tokens in *text*.

    Args:
        text: A sequence of hashable tokens (e.g. a list of words).

    Returns:
        float in [0, 1]; 0.0 for empty input (the original raised
        ZeroDivisionError on an empty token list).
    """
    if not text:
        return 0.0
    return len(set(text)) / len(text)
|
||
|
||
file = "test.txt"

with open(file, "r") as f:
    # Tokenize, keep alphabetic words only, and lower-case them.
    raw = f.read()
    words = [tok.lower() for tok in word_tokenize(raw) if tok.isalpha()]

    # Report the lexical diversity of the cleaned token stream.
    score = lexical_diversity(words)
    print(f"Lexical Diversity: {score}")
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,57 @@ | ||
# This script is intended to be used to have OpenAI summarize our agent
# conversations and determine for us the most relevant topics
|
||
# import libraries
import os
import sys

from openai import OpenAI

from reverie.backend_server.utils import *
|
||
client = OpenAI(api_key="Your API Key Here") | ||
|
||
|
||
# Helper: split a file into fixed-size binary chunks.
def file_chuncking(file_path):
    """Read *file_path* in binary mode and return its contents as a list
    of chunks of at most 1 MiB each (empty list for an empty file)."""
    chunk_size = 1024 * 1024
    chunks = []

    with open(file_path, "rb") as handle:
        # Two-argument iter(): keep calling read() until it returns b"" (EOF).
        for piece in iter(lambda: handle.read(chunk_size), b""):
            chunks.append(piece)

    return chunks
|
||
|
||
def main():
    """Summarize one raw conversation transcript with the OpenAI API.

    Reads convo-analysis/raw-text-convo/<file> (name given as argv[1]),
    sends its first 1 MiB chunk to the chat model, and prints the raw API
    response. Error handling mirrors the sibling analysis scripts
    (analysis_convo.py / raw_text_scrape.py), which the original lacked:
    a missing argument or file previously raised an unhandled exception.
    """
    try:
        raw_file = sys.argv[1]

        pathtofile = f"convo-analysis/raw-text-convo/{raw_file}"

        file_chunks = file_chuncking(pathtofile)
        if not file_chunks:
            # Guard: an empty transcript would make file_chunks[0] raise
            # IndexError and be misreported as a missing argument.
            print(f"Error: File '{raw_file}' is empty.")
            return
        print(file_chunks[0])

        # NOTE: only the first chunk is summarized; larger transcripts are
        # truncated (original behavior, preserved here).
        response = client.chat.completions.create(
            model="gpt-4",
            messages=[
                {
                    "role": "system",
                    "content": """
            You are an expert natural language processing expert. You will be provided with the text data that is the conversation
            between two generative agents. You will conduct a topic modeling analysis on the agent conversations. Then you will
            report the analysis and a concise manner with detailed information.""",
                },
                {"role": "user", "content": f"{file_chunks[0]}"},
            ],
            temperature=0.7,
            max_tokens=4096,
            top_p=1,
        )
        print()
        print(response)

    except IndexError:
        print("Error: No file name provided.")
    except FileNotFoundError:
        print(f"Error: File '{raw_file}' not found.")
    except Exception as e:
        print(f"An unexpected error occurred: {e}")


if __name__ == "__main__":
    main()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
# This script is intended to scrape agent conversations only
|
||
# importing modules | ||
import json | ||
import sys | ||
|
||
|
||
# Function to scrape the raw conversation text and dump it to a plain file
def raw_text_scrape(json_data, json_file):
    """Extract every chat utterance from *json_data* and write them, one per
    line, to convo-analysis/raw-text-convo/<json_file minus ".json">.txt.

    Args:
        json_data: Parsed simulation steps; each entry maps persona names to
            details that may contain a "chat" list of [speaker, utterance]
            pairs — assumed shape, confirm against the exporter.
        json_file: Source file name; its last 5 characters (the ".json"
            suffix) are dropped to build the output name.
    """
    raw_data = []
    # Iterate through each entry in the JSON data
    for entry in json_data:
        # Check each persona's conversation if available
        for persona, details in entry["persona"].items():
            if "chat" in details and details["chat"]:
                for conversation in details["chat"]:
                    raw_data.append(conversation[1])

    with open(f"convo-analysis/raw-text-convo/{json_file[:-5]}.txt", "w") as newfile:
        # Iterate the utterances directly rather than indexing by range().
        for utterance in raw_data:
            newfile.write(f"{utterance}\n")
|
||
|
||
def _load_steps(step_file):
    # Parse one exported simulation-step JSON file.
    with open(step_file, "r") as fh:
        return json.load(fh)


def main():
    """Command-line entry point.

    Expects the JSON file name as argv[1], loads it from the
    convo-analysis/JSON/ directory, and dumps its raw conversation text.
    """
    try:
        json_file = sys.argv[1]
        json_data = _load_steps(f"convo-analysis/JSON/{json_file}")
        raw_text_scrape(json_data, json_file)
    except IndexError:
        print("Error: No file name provided.")
    except FileNotFoundError:
        print(f"Error: File '{json_file}' not found.")
    except json.JSONDecodeError:
        print("Error: Invalid JSON file.")
        print(json_file)
    except Exception as e:
        print(f"An unexpected error occurred: {e}")


if __name__ == "__main__":
    main()
Oops, something went wrong.