oasst1-guanaco-extended / derive_guancao.py
winglian's picture
Upload 2 files
cfb13df
import json
import logging
import pandas as pd
from datasets import load_dataset, Dataset
# Download the OASST1 message-tree dataset from the HuggingFace Hub
# (network I/O); only the "train" split is used below.
train_dataset = load_dataset("OpenAssistant/oasst1")["train"]
def get_children(df, parent_ids):
    """Return the best-ranked direct reply for each parent in *parent_ids*.

    Filters *df* to rows whose ``parent_id`` appears in *parent_ids*,
    sorts them by ``rank`` ascending, and keeps the first (lowest-rank)
    reply per parent.
    """
    replies = df[df['parent_id'].isin(parent_ids)]
    ranked = replies.sort_values('rank', ascending=True)
    return ranked.drop_duplicates('parent_id')
def trace_conversations(df, parent_ids):
    """Walk the reply tree downward from *parent_ids*.

    At each depth, keeps only the best-ranked child per parent (via
    ``get_children``) and descends into it, producing a linear chain.
    Returns a flat list of message records (dicts) in depth order.
    """
    collected = []
    frontier = get_children(df, parent_ids)
    while not frontier.empty:
        collected += frontier.to_dict('records')
        frontier = get_children(df, frontier['message_id'])
    return collected
# Convert the HuggingFace dataset to a pandas dataframe for tree traversal.
df = pd.DataFrame.from_records(train_dataset)
# Root messages (conversation starters) have no parent.
root_nodes = df[df['parent_id'].isnull()]
# Build one linear conversation per root by following the best-ranked
# reply chain all the way down.
conversations = []
for idx, root in root_nodes.iterrows():
    conversation_chain = [root.to_dict()]
    conversation_chain.extend(trace_conversations(df, [root['message_id']]))
    conversations.append(conversation_chain)
# Keep only the fields needed downstream for each message.
for conversation in conversations:
    for message in conversation:
        keys_to_delete = set(message.keys()) - {'message_id', 'parent_id', 'role', 'text'}
        for key in keys_to_delete:
            del message[key]
# Create a new dataframe with only the 'conversations' field.
result_df = pd.DataFrame({'conversations': conversations})
# Convert dataframe back to a HuggingFace dataset.
result_dataset = Dataset.from_pandas(result_df)
logging.info(result_dataset)
# Write JSON Lines: one JSON object per line. The original omitted the
# newline separator, producing one unparseable concatenated blob instead
# of valid JSONL; the explicit utf-8 encoding avoids platform defaults.
with open("guanaco.jsonl", "w", encoding="utf-8") as f:
    for row in result_dataset:
        f.write(json.dumps(row) + "\n")