-
Notifications
You must be signed in to change notification settings - Fork 18
Expand file tree
/
Copy pathday_trading_rss2.0.py
More file actions
317 lines (264 loc) · 11.1 KB
/
day_trading_rss2.0.py
File metadata and controls
317 lines (264 loc) · 11.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
# Stdlib imports first, third-party second (PEP 8 grouping).
# Fix: `import streamlit as st` appeared twice in the original.
import base64
from datetime import datetime, timedelta

import feedparser
import numpy as np
import pandas as pd
import plotly.graph_objects as go
import streamlit as st
import yfinance as yf
# Function to load the image and convert it to base64
def get_base64_of_bin_file(bin_file):
    """Read *bin_file* as raw bytes and return its base64 encoding as text."""
    with open(bin_file, 'rb') as handle:
        raw = handle.read()
    return base64.b64encode(raw).decode()
# Path to the locally stored QR code image
qr_code_path = "qrcode.png"  # Ensure the image is in your app directory

# Convert image to base64. Fix: the original called get_base64_of_bin_file
# unconditionally, so a missing qrcode.png crashed the whole app at startup
# with FileNotFoundError; degrade gracefully instead.
try:
    qr_code_base64 = get_base64_of_bin_file(qr_code_path)
except OSError:
    qr_code_base64 = None

if qr_code_base64:
    # Custom CSS to position the QR code close to the top-right corner under the "Deploy" area
    st.markdown(
        f"""
        <style>
        .qr-code {{
            position: fixed; /* Keeps the QR code fixed in the viewport */
            top: 10px; /* Sets the distance from the top of the viewport */
            right: 10px; /* Sets the distance from the right of the viewport */
            width: 200px; /* Adjusts the width of the QR code */
            z-index: 100; /* Ensures the QR code stays above other elements */
        }}
        </style>
        <img src="data:image/png;base64,{qr_code_base64}" class="qr-code">
        """,
        unsafe_allow_html=True
    )
# Function to fetch data based on the selected period and stock symbol
def fetch_data(stock_symbol, interval, yf_period):
    """Download OHLCV bars for *stock_symbol* from Yahoo Finance.

    Returns the downloaded DataFrame (possibly empty). On a download error the
    message is surfaced via st.error and an empty DataFrame is returned; an
    empty-but-successful download is also reported via st.error.
    """
    try:
        frame = yf.download(stock_symbol, period=yf_period, interval=interval)
    except Exception as e:
        st.error(f"Error fetching data from Yahoo Finance: {e}")
        return pd.DataFrame()  # Return empty DataFrame
    if frame.empty:
        st.error(f"No data returned for ticker {stock_symbol}. Please check the ticker symbol or interval.")
    return frame
# Function to calculate support and resistance levels
def calculate_support_resistance(data, window=20):
    """Compute rolling support (min of Low) and resistance (max of High).

    Adds 'Support' and 'Resistance' columns to *data* in place and returns the
    most recent (support, resistance) pair. Returns (None, None) when the
    required columns are missing, the frame is shorter than *window*, or the
    rolling computation raises.
    """
    if 'Low' not in data.columns or 'High' not in data.columns:
        st.error("Data does not contain required columns for support and resistance calculation.")
        return None, None
    if len(data) < window:
        st.error("Not enough data to calculate support and resistance.")
        return None, None
    try:
        data['Support'] = data['Low'].rolling(window=window).min()
        data['Resistance'] = data['High'].rolling(window=window).max()
        support_values = data['Support'].dropna()
        resistance_values = data['Resistance'].dropna()
        latest_support = support_values.iloc[-1] if not support_values.empty else None
        latest_resistance = resistance_values.iloc[-1] if not resistance_values.empty else None
        if latest_support is None or latest_resistance is None:
            # NOTE: the error is reported but the (possibly None) pair is still
            # returned, matching the caller's None-check contract.
            st.error("Failed to retrieve latest support or resistance values.")
        return latest_support, latest_resistance
    except Exception as e:
        st.error(f"Error calculating support and resistance: {e}")
        return None, None
# Function to fetch stock news using RSS feed
def fetch_stock_news(stock_symbol):
    """Fetch recent headlines for *stock_symbol* from Yahoo Finance's RSS feed.

    Returns a list of dicts with 'title', 'publishedAt' and 'url' keys, or []
    on failure. Fix: feedparser entries are not guaranteed to carry every
    field, so attribute access (entry.title / entry.published / entry.link)
    could raise AttributeError; entry.get(...) with an empty-string default is
    used instead.
    """
    url = "https://finance.yahoo.com/rss/headline?s=" + stock_symbol
    try:
        feed = feedparser.parse(url)
        return [
            {
                'title': entry.get('title', ''),
                'publishedAt': entry.get('published', ''),
                'url': entry.get('link', ''),
            }
            for entry in feed.entries
        ]
    except Exception as e:
        st.error(f"Error fetching news from RSS feed: {e}")
        return []
# Function to identify engulfing candlesticks
def identify_engulfing_patterns(data):
    """Flag two-candle engulfing patterns on *data* in place.

    Adds boolean 'Bullish Engulfing' and 'Bearish Engulfing' columns. A row is
    flagged when its body opens inside and closes beyond the previous candle's
    body, in the opposite direction of that candle. Returns the same DataFrame.
    """
    prev_open = data['Open'].shift(1)
    prev_close = data['Close'].shift(1)

    # Bullish: previous candle was bearish (prev_open > prev_close) and the
    # current bullish body spans the previous body.
    data['Bullish Engulfing'] = (
        (data['Open'] < prev_close)
        & (data['Close'] > prev_open)
        & (data['Close'] > data['Open'])
        & (prev_open > prev_close)
    )
    # Bearish: mirror image — previous candle bullish, current bearish body
    # spans the previous body.
    data['Bearish Engulfing'] = (
        (data['Open'] > prev_close)
        & (data['Close'] < prev_open)
        & (data['Close'] < data['Open'])
        & (prev_open < prev_close)
    )
    return data
# Streamlit app
def main():
    """Render the dashboard: candlestick chart with support/resistance lines,
    Lorentzian-distance anomaly markers, engulfing-pattern markers, an RSS
    news section, and a naive next-interval return prediction.

    Fixes vs. the original:
    - integer positional indexing on a DatetimeIndex-backed Series
      (data_returns[i], data_returns[-1]) is deprecated/removed in modern
      pandas — replaced with .iloc / ndarray access;
    - `if threshold and ...` treated a legitimate threshold of 0.0 as "no
      threshold" — replaced with an explicit `is not None` check.
    """
    st.title("Stock Analysis with News and Engulfing Patterns")

    # Sidebar for user input
    st.sidebar.header("Settings")
    stock_symbols = [
        "AAPL",   # Apple
        "GOOGL",  # Alphabet (Google)
        "MSFT",   # Microsoft
        "AMZN",   # Amazon
        "TSLA",   # Tesla
        "META",   # Meta Platforms (Facebook)
        "NFLX",   # Netflix
        "NVDA",   # NVIDIA
        "INTC",   # Intel
        "AMD",    # AMD
    ]
    stock_symbol = st.sidebar.selectbox("Select Stock Symbol", stock_symbols)

    # Intraday bar size; the labels double as yfinance interval strings.
    interval = st.sidebar.selectbox(
        "Select Interval",
        ["1m", "5m", "15m"]
    )
    interval_map = {
        "1m": "1m",
        "5m": "5m",
        "15m": "15m"
    }

    # yf_period and interval setup
    yf_period = "1d"
    yf_interval = interval_map.get(interval, "1m")  # Default to "1m" if not found

    # Fetch data based on selected period, interval, and stock symbol
    data = fetch_data(stock_symbol, yf_interval, yf_period)
    if data.empty:
        st.error("Failed to retrieve data. Please try again.")
        return

    # Identify engulfing patterns
    data = identify_engulfing_patterns(data)

    # Per-interval returns of the close price.
    close_data = data['Close']
    data_returns = close_data.pct_change().dropna()

    def lorentzian_distance(x, y):
        # Log-quadratic ("Lorentzian") distance: grows slowly for large gaps,
        # which keeps the mean+2*std anomaly threshold robust to outliers.
        return np.log(1 + (x - y) ** 2)

    # Distances between consecutive returns, computed positionally on the
    # underlying ndarray (np.diff gives x[i+1] - x[i]; the distance is
    # symmetric in its arguments, so this matches the element-wise formula).
    if len(data_returns) < 2:
        st.warning("Not enough data to compute Lorentzian distances.")
        lorentzian_distances = np.array([])
    else:
        returns_arr = data_returns.to_numpy()
        lorentzian_distances = np.log(1 + np.diff(returns_arr) ** 2)

    # Define a threshold to identify anomalies
    if len(lorentzian_distances) > 0:
        threshold = lorentzian_distances.mean() + 2 * lorentzian_distances.std()
        anomalies = lorentzian_distances > threshold
        anomaly_indices = np.where(anomalies)[0]
        # Distance k is between returns k and k+1; the timestamp of the first
        # return of the pair marks the anomaly, matching the original mapping.
        anomaly_dates = data_returns.index[anomaly_indices]
    else:
        threshold = None
        anomaly_dates = []

    # NaN everywhere except anomaly timestamps so plotly draws isolated markers.
    data['Anomalies'] = np.where(data.index.isin(anomaly_dates), data['Close'], np.nan)

    # Calculate support and resistance levels
    latest_support, latest_resistance = calculate_support_resistance(data)
    if latest_support is None or latest_resistance is None:
        st.error("Failed to calculate support and resistance levels.")
        return

    # Create candlestick chart
    fig = go.Figure()
    fig.add_trace(go.Candlestick(
        x=data.index,
        open=data['Open'],
        high=data['High'],
        low=data['Low'],
        close=data['Close'],
        name='Candlestick'
    ))

    # Support and resistance drawn as horizontal dashed lines at the latest levels.
    fig.add_trace(go.Scatter(
        x=data.index,
        y=[latest_support] * len(data),
        mode='lines',
        name='Support',
        line=dict(color='green', width=2, dash='dash')
    ))
    fig.add_trace(go.Scatter(
        x=data.index,
        y=[latest_resistance] * len(data),
        mode='lines',
        name='Resistance',
        line=dict(color='blue', width=2, dash='dash')
    ))

    # Add anomalies as scatter points
    fig.add_trace(go.Scatter(
        x=data.index,
        y=data['Anomalies'],
        mode='markers',
        name='Anomalies',
        marker=dict(color='yellow', size=10, symbol='x')
    ))

    # Engulfing markers are offset by 10% of the mean bar range so they sit
    # just below (bullish) / above (bearish) the candles.
    fig.add_trace(go.Scatter(
        x=data[data['Bullish Engulfing']].index,
        y=data[data['Bullish Engulfing']]['Low'] - (data['High'] - data['Low']).mean() * 0.1,
        mode='markers',
        name='Bullish Engulfing',
        marker=dict(color='green', size=10, symbol='triangle-up')
    ))
    fig.add_trace(go.Scatter(
        x=data[data['Bearish Engulfing']].index,
        y=data[data['Bearish Engulfing']]['High'] + (data['High'] - data['Low']).mean() * 0.1,
        mode='markers',
        name='Bearish Engulfing',
        marker=dict(color='red', size=10, symbol='triangle-down')
    ))

    # Format date range
    start_date = data.index.min().strftime('%Y-%m-%d')
    end_date = data.index.max().strftime('%Y-%m-%d')

    # Update layout
    fig.update_layout(
        title=f'{stock_symbol} Stock Price from {start_date} to {end_date} with Anomalies, Support, and Resistance ({interval})',
        xaxis_title='Date',
        yaxis_title='Stock Price',
        xaxis_rangeslider_visible=False,  # Hide range slider
        xaxis_tickformat='%H:%M',  # Format x-axis to show hours and minutes
    )

    # Display the chart
    st.plotly_chart(fig)

    # News section
    st.header(f"Recent {stock_symbol} News")
    news_items = fetch_stock_news(stock_symbol)
    if not news_items:
        st.write("No news found.")
    else:
        for item in news_items:
            st.subheader(item['title'])
            st.write(f"Published: {item['publishedAt']}")
            st.write(f"[Read more]({item['url']})")
            st.write("---")

    # Add engulfing patterns analysis
    st.header("Engulfing Patterns Analysis")
    bullish_engulfing_count = data['Bullish Engulfing'].sum()
    bearish_engulfing_count = data['Bearish Engulfing'].sum()
    st.write(f"Number of Bullish Engulfing patterns: {bullish_engulfing_count}")
    st.write(f"Number of Bearish Engulfing patterns: {bearish_engulfing_count}")
    if bullish_engulfing_count > bearish_engulfing_count:
        st.write("There are more Bullish Engulfing patterns, which might indicate a potential upward trend.")
    elif bearish_engulfing_count > bullish_engulfing_count:
        st.write("There are more Bearish Engulfing patterns, which might indicate a potential downward trend.")
    else:
        st.write("The number of Bullish and Bearish Engulfing patterns is equal, suggesting no clear trend direction based on these patterns alone.")

    # Predict the next interval's return
    st.header("Prediction")

    def predict_next_return(returns, distances):
        """Predict the next interval's return as the historical mean, warning
        when the latest move looks anomalous against *threshold*."""
        if len(returns) < 2:
            st.warning("Not enough data to predict the next interval's return.")
            return 0
        # Fix: .iloc for positional access (returns[-1] is label-based).
        recent_distance = lorentzian_distance(returns.iloc[-2], returns.iloc[-1])
        # Fix: explicit None check so a threshold of exactly 0.0 still counts.
        if threshold is not None and recent_distance > threshold:
            st.warning("Anomaly detected. Predicted return may be highly volatile.")
        else:
            st.info("No anomaly detected. Predicted return is based on historical average.")
        return returns.mean()

    predicted_return = predict_next_return(data_returns, lorentzian_distances)
    st.write(f"Predicted next interval's return: {predicted_return:.4f}")


if __name__ == "__main__":
    main()