#!/usr/bin/env python3
"""
Market Research Script

Connects to Gemini and Jules to analyze market data and generate research reports.
"""
import concurrent.futures
import json
import logging
import os
from datetime import datetime
from pathlib import Path

import requests
from dotenv import load_dotenv

# TODO: google.generativeai is deprecated, migrate to google.genai in future
# import google.genai as genai

# Configure module-wide logging once at import time.
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
logger = logging.getLogger(__name__)

# Repository layout: this script lives one directory below the repo root.
REPO_ROOT = Path(__file__).resolve().parents[1]
DOCS_DIR = REPO_ROOT / "docs"  # generated markdown reports
DATA_DIR = REPO_ROOT / "data"  # raw market-data snapshots
# Pull environment variables from a local .env file (no-op if the file is absent).
load_dotenv()
2026-01-22 19:36:23 +00:00
def get_market_data():
    """Fetch recent market data.

    Tries a single batched yfinance download first; if yfinance is
    unavailable, the download fails, or no symbol yields usable history,
    falls back to a static simulated snapshot so downstream analysis
    always has input.

    Returns:
        dict: {"timestamp": ISO-8601 str, "symbols": {name: metrics dict}}.
              All numeric values are plain Python floats (JSON-serializable).
    """
    log = logging.getLogger(__name__)
    data = None

    # Lazy import: yfinance/pandas are heavy and optional dependencies.
    try:
        import yfinance as yf
        import pandas as pd
        yfinance_available = True
    except ImportError:
        yfinance_available = False

    if yfinance_available:
        try:
            log.info("Fetching real market data via yfinance...")
            symbols = ["EURUSD=X", "GBPUSD=X", "GC=F", "BTC-USD"]
            market_data = {"timestamp": datetime.now().isoformat(), "symbols": {}}

            # Batch-fetch all symbols in one request instead of one HTTP call each.
            try:
                # group_by='ticker' keeps a consistent MultiIndex layout for >1 symbol.
                tickers_data = yf.download(symbols, period="14d", interval="1d",
                                           group_by="ticker", progress=False)
            except Exception as e:
                log.error("Bulk download failed: %s", e)
                tickers_data = None

            if tickers_data is not None:
                for sym in symbols:
                    try:
                        hist = None
                        # Extract the per-symbol frame from the batched result.
                        if isinstance(tickers_data.columns, pd.MultiIndex):
                            if sym in tickers_data.columns.levels[0]:
                                hist = tickers_data[sym]
                        else:
                            # Flat columns are unlikely with group_by='ticker',
                            # but handle them defensively.
                            hist = tickers_data

                        if hist is not None and not hist.empty:
                            if 'Close' not in hist.columns:
                                continue
                            hist = hist.dropna(subset=['Close'])
                            if hist.empty:
                                continue

                            # float(...) converts numpy scalars so the snapshot is
                            # JSON-serializable (json.dump rejects np.float64).
                            current_price = float(hist['Close'].iloc[-1])

                            # Simple trend signal: last price vs. 5-day SMA.
                            sma_5 = float(hist['Close'].tail(5).mean())
                            trend = "UP" if current_price > sma_5 else "DOWN"

                            # Volatility proxy: last daily High-Low range vs. 1% of
                            # price (arbitrary threshold).
                            daily_range = float(hist['High'].iloc[-1] - hist['Low'].iloc[-1])
                            volatility = "HIGH" if daily_range > (current_price * 0.01) else "LOW"

                            market_data["symbols"][sym] = {
                                "price": round(current_price, 4),
                                "trend": trend,
                                "volatility": volatility,
                                "history_last_5_closes": [
                                    round(float(x), 4) for x in hist['Close'].tail(5).tolist()
                                ],
                            }
                    except Exception as e:
                        log.warning("Failed to process %s: %s", sym, e)

            # Only accept the live data if at least one symbol succeeded.
            if market_data["symbols"]:
                data = market_data
        except Exception as e:
            log.error("yfinance failed: %s", e)

    if data:
        return data

    # Static fallback so the pipeline keeps working without yfinance/network.
    log.info("Using simulated/fallback market data.")
    return {
        "timestamp": datetime.now().isoformat(),
        "symbols": {
            "EURUSD": {"price": 1.0850, "trend": "UP", "rsi": 65.5, "volatility": "MEDIUM"},
            "GBPUSD": {"price": 1.2700, "trend": "SIDEWAYS", "rsi": 50.2, "volatility": "LOW"},
            "XAUUSD": {"price": 2030.50, "trend": "DOWN", "rsi": 35.0, "volatility": "HIGH"},
        },
    }
def analyze_with_gemini(data):
    """Ask Gemini for a market-research report on *data*.

    Args:
        data: JSON-serializable market snapshot (see get_market_data()).

    Returns:
        str | None: the report text, an error string if the call failed
        (so the failure is visible in the final report), or None when no
        API key is configured.
    """
    log = logging.getLogger(__name__)
    api_key = os.environ.get("GEMINI_API_KEY") or os.environ.get("GOOGLE_API_KEY")
    if not api_key:
        log.warning("GEMINI_API_KEY/GOOGLE_API_KEY not found. Skipping Gemini analysis.")
        return None

    try:
        # Lazy import: google.generativeai is heavy and optional.
        import google.generativeai as genai

        genai.configure(api_key=api_key)
        # Model is overridable via env so new releases need no code change.
        model_name = os.environ.get("GEMINI_MODEL", "gemini-2.0-flash")
        model = genai.GenerativeModel(model_name)

        prompt = f"""
Analyze the following market data and provide a research report for a trading bot.
Focus on:
1. Current market regime (Trending, Ranging, Volatile).
2. Potential trade setups based on Price Action and Trend.
3. Risk management suggestions.

Data:
{json.dumps(data, indent=2)}
"""
        response = model.generate_content(prompt)
        return response.text
    except Exception as e:
        log.error("Gemini analysis failed: %s", e)
        # Return the error text so it still shows up in the generated report.
        return f"Gemini analysis failed: {e}"
2026-01-23 19:50:48 +00:00
def analyze_with_jules(data):
    """Ask the Jules REST endpoint for a market-research report on *data*.

    Args:
        data: JSON-serializable market snapshot (see get_market_data()).

    Returns:
        str | None: the report text, an error string if the call failed
        (so the failure is visible in the final report), or None when the
        API key/URL are not configured.
    """
    log = logging.getLogger(__name__)
    api_key = os.environ.get("JULES_API_KEY")
    api_url = os.environ.get("JULES_API_URL")
    model = os.environ.get("JULES_MODEL", "jules-v1")
    if not api_key or not api_url:
        log.warning("JULES_API_KEY or JULES_API_URL not found. Skipping Jules analysis.")
        return None

    prompt = f"""
You are an expert market analyst. Analyze the following market data and provide a research report for a trading bot.
Focus on:
1. Macro view and Sentiment.
2. Specific trade ideas.
3. Correlation analysis if multiple symbols provided.

Data:
{json.dumps(data, indent=2)}
"""
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
    }
    payload = {
        "model": model,
        "prompt": prompt,
    }

    try:
        response = requests.post(api_url, json=payload, headers=headers, timeout=60)
        response.raise_for_status()
        # The exact response schema is unknown; probe the common shapes first.
        try:
            resp_json = response.json()
            if "response" in resp_json:
                return resp_json["response"]
            elif "choices" in resp_json and len(resp_json["choices"]) > 0:
                return resp_json["choices"][0].get("text", str(resp_json))
            else:
                return str(resp_json)
        except ValueError:
            # Body was not JSON; return it verbatim.
            return response.text
    except Exception as e:
        log.error("Jules analysis failed: %s", e)
        error_msg = f"Jules analysis failed: {e}"
        # DNS failures usually mean a misconfigured URL; surface a hint in the report.
        if "NameResolutionError" in str(e) or "Failed to resolve" in str(e):
            error_msg += ("\n\n**Hint:** The Jules API URL might be incorrect. Please check "
                          "`JULES_API_URL` in `.env`. If you intended to use the Jules CLI tool, "
                          "note that this script attempts a REST API call.")
        return error_msg
def main():
    """Fetch market data, run both AI analyses in parallel, write the report.

    Side effects: creates docs/ and data/ if missing, writes
    data/market_snapshot.json and docs/market_research_report.md.
    """
    logger.info("Starting Market Research...")
    # Ensure output directories exist.
    DOCS_DIR.mkdir(exist_ok=True)
    DATA_DIR.mkdir(exist_ok=True)

    data = get_market_data()
    logger.info("Market data loaded for %d symbols.", len(data.get("symbols", {})))

    # Save a raw snapshot alongside the report for later inspection.
    # utf-8 so output is stable regardless of the platform default encoding.
    with open(DATA_DIR / "market_snapshot.json", "w", encoding="utf-8") as f:
        json.dump(data, f, indent=2)

    # The two AI calls are independent network requests; overlap them.
    with concurrent.futures.ThreadPoolExecutor() as executor:
        future_gemini = executor.submit(analyze_with_gemini, data)
        future_jules = executor.submit(analyze_with_jules, data)
        gemini_report = future_gemini.result()
        jules_report = future_jules.result()

    report_path = DOCS_DIR / "market_research_report.md"
    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    content = f"# Market Research Report\n\nGenerated: {timestamp}\n\n"
    if gemini_report:
        content += f"## Gemini Analysis\n\n{gemini_report}\n\n"
    if jules_report:
        content += f"## Jules Analysis\n\n{jules_report}\n\n"
    if not gemini_report and not jules_report:
        content += "## Analysis Failed\n\nNo AI providers were available or both failed."

    # utf-8 again: model output may contain non-ASCII characters.
    with open(report_path, "w", encoding="utf-8") as f:
        f.write(content)
    logger.info("Report saved to %s", report_path)
# Run only when executed as a script, not when imported.
# (Fixes the mangled '" __main__ "' literal, which never matched __name__.)
if __name__ == "__main__":
    main()