"""Translate the Russian text fields of a physics dataset into English.

Reads a parquet file, recursively translates every string cell with the OpenAI
chat API, checkpoints after each batch, and writes the final translated file.
"""
import pandas as pd

from utils.openai import load_client

# Build the OpenAI client from a key file so the API key is not hard-coded in
# the script. load_client is the project's own helper; it is assumed here to
# return an openai.OpenAI client configured with the key read from that file.
client = load_client('<path-to-your-key-file>')

# Source dataset: a parquet file whose text fields are in Russian.
file_path = '/home/yiyangai/stephenqs/datasets/physics_big/data/combined_images_non_empty.parquet'
df = pd.read_parquet(file_path)


def translate_text(text):
    """Recursively translate Russian strings to English, preserving structure.

    Lists and dicts are walked element by element, plain strings are sent to
    the chat model, and any other value (numbers, None, image bytes, ...) is
    returned unchanged.
    """
    if isinstance(text, list):
        return [translate_text(item) for item in text]
    elif isinstance(text, dict):
        return {key: translate_text(value) for key, value in text.items()}
    elif isinstance(text, str):
        response = client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": f"Translate this Russian text to English: {text}"},
            ],
        )
        return response.choices[0].message.content.strip()
    else:
        return text
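

# Optional hardening (a sketch, not part of the original flow): long runs over
# many rows can hit transient API failures, so a thin retry wrapper around the
# translator can help. openai.RateLimitError and openai.APIConnectionError are
# exception classes that exist in openai>=1.0; the retry count and backoff
# values below are illustrative assumptions.
import time

import openai


def translate_with_retry(text, max_retries=3, backoff_seconds=5):
    """Retry translate_text with linear backoff on transient API errors."""
    for attempt in range(max_retries):
        try:
            return translate_text(text)
        except (openai.RateLimitError, openai.APIConnectionError):
            if attempt == max_retries - 1:
                raise
            time.sleep(backoff_seconds * (attempt + 1))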


# Translate the DataFrame batch by batch, accumulating results as we go.
df_translated = pd.DataFrame(columns=df.columns)
batch_size = 20

for i in range(0, len(df), batch_size):
    print(f"Translating samples {i + 1} to {min(i + batch_size, len(df))}...")
    batch = df.iloc[i:i + batch_size].copy()

    # Run the recursive translator over every cell of every column.
    for column in batch.columns:
        batch[column] = batch[column].apply(translate_text)

    df_translated = pd.concat([df_translated, batch], ignore_index=True)

    # Checkpoint the partial result after each batch so progress survives an
    # interrupted run.
    df_translated.to_parquet('/home/yiyangai/stephenqs/datasets/physics_big/data/partial_translation.parquet')

# Write the fully translated dataset.
output_file_path = '/home/yiyangai/stephenqs/datasets/physics_big/data/translated_combined_images_non_empty.parquet'
df_translated.to_parquet(output_file_path)

print(f"Translation complete; results saved to {output_file_path}")