import json
import logging

import pandas as pd
from datasets import load_dataset, Dataset
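# Load the oasst1 training split. Each record is a single message that points
# to its parent via parent_id, so whole conversations are stored as trees.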
train_dataset = load_dataset("OpenAssistant/oasst1")["train"]

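# For each parent message, keep only the reply with the lowest rank value
# (rank 0 is the preferred reply), collapsing the reply tree into a single path.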
def get_children(df, parent_ids):
    children = df[df['parent_id'].isin(parent_ids)]
    return children.sort_values('rank', ascending=True).drop_duplicates('parent_id')

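# Walk down the tree level by level, collecting the chosen reply at each step
# until a message has no further replies.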
def trace_conversations(df, parent_ids):
    conversations = []
    children = get_children(df, parent_ids)

    while not children.empty:
        conversations.extend(children.to_dict('records'))
        parent_ids = children['message_id']
        children = get_children(df, parent_ids)

    return conversations

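# Flatten the Hugging Face dataset into a pandas DataFrame so the tree can be
# traversed with simple column lookups.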
df = pd.DataFrame.from_records(train_dataset)

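# Root messages are the opening prompts: they are the only rows with no parent.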
root_nodes = df[df['parent_id'].isnull()]

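# Build one linear conversation per root by following its chain of replies.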
conversations = []
for idx, root in root_nodes.iterrows():
    conversation_chain = [root.to_dict()]
    conversation_chain.extend(trace_conversations(df, [root['message_id']]))
    conversations.append(conversation_chain)

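# Strip each message down to the fields needed downstream; all other oasst1
# metadata columns are discarded.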
for conversation in conversations:
    for message in conversation:
        keys_to_delete = set(message.keys()) - {'message_id', 'parent_id', 'role', 'text'}
        for key in keys_to_delete:
            del message[key]

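# Wrap the conversations in a DataFrame and convert back into a Hugging Face Dataset.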
result_df = pd.DataFrame({'conversations': conversations})

result_dataset = Dataset.from_pandas(result_df)

# The root logger defaults to WARNING, which would silently drop this summary,
# so configure logging before emitting it.
logging.basicConfig(level=logging.INFO)
logging.info(result_dataset)

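# Write one conversation per line in JSON Lines format.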
with open("guanaco.jsonl", "w") as f:
    for row in result_dataset:
        # A trailing newline after each record is required for valid JSONL.
        f.write(json.dumps(row) + "\n")