Bot.py — 100 lines (77 loc) · 3.03 KB

Fetches Nairobi Stock Exchange quotes from a RapidAPI endpoint, appends them
to tweets.txt, and posts them to Twitter in batches via tweepy.
import tweepy
import time
import requests
# NSE (Nairobi Stock Exchange) RapidAPI quote endpoints, one per listed
# security. Each returns a JSON list of quote records for that company.
# List of URLs to fetch data from
urls = [
"https://nairobi-stock-exchange-nse.p.rapidapi.com/stocks/Absa Bank Kenya Plc",
"https://nairobi-stock-exchange-nse.p.rapidapi.com/stocks/Safaricom",
"https://nairobi-stock-exchange-nse.p.rapidapi.com/stocks/KCB Group Limited",
"https://nairobi-stock-exchange-nse.p.rapidapi.com/stocks/Equity Group Holdings Limited",
"https://nairobi-stock-exchange-nse.p.rapidapi.com/stocks/Co-operative Bank of Kenya Limited",
"https://nairobi-stock-exchange-nse.p.rapidapi.com/stocks/Kenya Power & Lighting Company",
# Add more URLs here as needed
]
# Common headers for all requests
# SECURITY NOTE(review): the RapidAPI key is hardcoded in source. It should be
# rotated and loaded from an environment variable or config file instead of
# being committed to the repository.
headers = {
"X-RapidAPI-Key": "be623b818cmsh7d25d2e33a15fccp15ee38jsne2bef3c5c029",
"X-RapidAPI-Host": "nairobi-stock-exchange-nse.p.rapidapi.com"
}
# Fetch a quote for each configured NSE security and append a formatted
# summary block to tweets.txt, which the posting loop below consumes.
# Open tweets.txt in append mode so reruns accumulate rather than overwrite.
with open('tweets.txt', 'a') as f:
    for url in urls:
        try:
            # timeout bounds both connect and read so one stalled endpoint
            # cannot hang the whole bot indefinitely.
            response = requests.get(url, headers=headers, timeout=10)
        except requests.RequestException as exc:
            # Network/DNS/timeout failure: report and move on to the next URL.
            print("Request error for:", url, "-", exc)
            continue
        # Only parse the body on a successful response.
        if response.status_code == 200:
            data = response.json()
            if data:
                # The API returns a JSON list of quote dicts; only the first
                # record is used. .get() tolerates missing fields (-> None).
                item = data[0]
                stock_name = item.get('name')
                volume = item.get('volume')
                price = item.get('price')
                change = item.get('change')
                # Format the tweet-like string (trailing blank line separates
                # entries in the file).
                tweet = f"Company: {stock_name}\nStock Name: {stock_name}\nVolume: {volume}\nPrice: {price}\nChange: {change}\n\n"
                # Write the tweet to tweets.txt file
                f.write(tweet)
                f.write('\n')
                print("Data updated for:", stock_name)
            else:
                print("No data found in response:", url)
        else:
            print("Failed to fetch data from:", url)
# (Your Twitter API Credentials here)
# SECURITY NOTE(review): credentials are placeholders committed in source.
# Load them from environment variables (e.g. os.environ) before deploying;
# never commit real keys. The "-" in access_token looks like a redaction
# artifact — confirm against the real credential format.
consumer_key = ""
consumer_secret = ""
access_token = "-"
access_token_secret = ""
# Initialize the Twitter (v2 API) client using OAuth 1.0a user context,
# which is required for posting tweets on behalf of an account.
client = tweepy.Client(
consumer_key=consumer_key,
consumer_secret=consumer_secret,
access_token=access_token,
access_token_secret=access_token_secret
)
# Path to the text file containing queued tweet lines.
tweet_file_path = "tweets.txt"


def read_tweets_without_blanks(path=tweet_file_path):
    """Read tweet lines from a file, skipping blank lines.

    Generalized (backward-compatibly) to accept any path; with no argument
    it reads the module-level tweet queue, exactly as before.

    Args:
        path: File to read. Defaults to ``tweet_file_path``.

    Returns:
        list[str]: Each non-empty line, stripped of surrounding whitespace,
        in file order. Whitespace-only lines are excluded.
    """
    with open(path, "r") as file:
        # Stream line-by-line instead of readlines(): same result, no
        # intermediate list of the whole file.
        return [line.strip() for line in file if line.strip()]
# Read tweets and remove blanks
tweets = read_tweets_without_blanks()
# Post in batches of 5 queued lines per tweet.
for i in range(0, len(tweets), 5):
    tweet_text = "\n".join(tweets[i:i + 5])
    try:
        response = client.create_tweet(text=tweet_text)
        print(f"Tweet posted: {tweet_text}")
    except tweepy.TweepyException as e:
        # BUG FIX: tweepy v4 (required for tweepy.Client) removed
        # tweepy.TweepError; referencing it raised AttributeError inside the
        # except clause. TweepyException is the v4 base error class.
        print(f"Error posting tweet: {e}")
    # Pause between batches to stay under posting rate limits.
    time.sleep(10)

# Clear the queue so already-posted tweets are not re-sent on the next run.
with open(tweet_file_path, "w") as file:
    file.write("")  # Truncate the file to empty
print("All tweets posted and tweets.txt cleared!")