from transformers import AutoTokenizer, AutoModelForCausalLM
import requests
import gradio as gr

# Load the model and tokenizer from Hugging Face
tokenizer = AutoTokenizer.from_pretrained("LaierTwoLabsInc/Satoshi-7B")
model = AutoModelForCausalLM.from_pretrained("LaierTwoLabsInc/Satoshi-7B")
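
# Note: Satoshi-7B is a 7B-parameter model, so loading it in full precision on CPU
# needs a lot of memory. If GPU hardware and the `accelerate` package are available
# (an assumption, not shown in this file), half-precision loading is a common
# alternative sketch:
#
#   import torch
#   model = AutoModelForCausalLM.from_pretrained(
#       "LaierTwoLabsInc/Satoshi-7B", torch_dtype=torch.float16, device_map="auto"
#   )
#   # inputs would then need to be moved with .to(model.device) before generate()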

# Function to fetch the current BTC price (in USD) from the CoinGecko API
def fetch_btc_price():
    url = "https://api.coingecko.com/api/v3/simple/price"
    params = {'ids': 'bitcoin', 'vs_currencies': 'usd'}
    response = requests.get(url, params=params, timeout=10)
    if response.status_code == 200:
        data = response.json()
        return data['bitcoin']['usd']
    return None

# Function to generate a response based on the prompt
def generate_custom_response(prompt):
    # Encode the input prompt
    inputs = tokenizer(prompt, return_tensors="pt")
    # Generate a response from the model; max_new_tokens bounds only the generated
    # tokens, and passing attention_mask avoids a padding warning
    outputs = model.generate(
        inputs['input_ids'],
        attention_mask=inputs['attention_mask'],
        max_new_tokens=200,
        num_return_sequences=1,
    )
    # Decode the generated response
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return response

# Function to fetch the BTC price and generate analysis based on the prompt
def btc_analysis(prompt):
    btc_price = fetch_btc_price()
    if btc_price is not None:
        full_prompt = f"Bitcoin's current price is ${btc_price}. {prompt}"
        ai_response = generate_custom_response(full_prompt)
        return ai_response
    else:
        return "Error fetching Bitcoin price."

# Gradio interface for BTC analysis
interface = gr.Interface(
    fn=btc_analysis,
    inputs=gr.Textbox(value="What does this price mean for investors and the market?", label="Prompt"),
    outputs="text",
    title="Bitcoin Price Analysis",
    description="Fetch Bitcoin's current price and get analysis based on the provided prompt using the Satoshi-7B model.",
)

# Launch the Gradio app
interface.launch()
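
# Note: this script assumes the Space's environment provides the packages used
# above (transformers with its torch backend, requests, gradio), e.g. via a
# requirements.txt; that file is not part of this snippet and is an assumption
# about the deployment setup.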