-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathscipy_webscraper.py
112 lines (85 loc) · 3.68 KB
/
scipy_webscraper.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
import csv
import os
import os.path
import time
from pathlib import Path

import requests

import scraper_plotter as scraper
'''
Watcher script that constantly runs in the background and checks the size of
"inputs.txt" for changes. A change in file size means our Telegram bot
received input and wrote it to the text file; the polling loop below then
creates the corresponding plots and saves them as PNG files.

The personal working directory below has to be set!
'''
### Set your current working directory ###
os.chdir("C:/Users/bad42/Desktop/scientificpython")
# Checks for current working directory
print("The current working directory (scraper) is:" + os.getcwd())
# Showing the program has started
print("Scanning text file...")
# File sizes observed on the previous and current poll. The original code
# declared these with `global`, which is a no-op at module level and has
# been removed; plain module-level assignment is sufficient.
old_size = 0
new_size = Path("inputs.txt").stat().st_size
# The endless loop below polls the file size for changes.
while True:
    '''
    Endless loop that polls "inputs.txt" (written by the Telegram bot) for
    new input. When a user gives input the file size changes; the file is
    then re-read, the last line is split into words, and the first word
    selects the plotting function while up to three further words are
    forwarded as its keyword parameters (t, p1, p2). Missing parameters
    fall back to the plotting functions' default values. The word "close"
    terminates the scraper.
    '''
    # Updating our file size in every iteration.
    old_size = new_size
    new_size = Path("inputs.txt").stat().st_size
    # Sleep between polls; the original busy-waited and pegged a CPU core.
    time.sleep(0.5)
    if old_size == new_size:
        continue
    # Size changed: re-read all rows. `with` closes the handle, which the
    # original `csv.reader(open(...))` leaked on every poll.
    with open('inputs.txt') as fh:
        rows = list(csv.reader(fh))
    # Guard against an empty/truncated file (original raised IndexError).
    if not rows:
        continue
    # rows[-1] handles the single-entry case too; the original had two
    # identical branches for len == 1 and len > 1.
    tokens = str(rows[-1][0]).split()
    # Blank last line: nothing to dispatch.
    if not tokens:
        continue
    command, params = tokens[0], tokens[1:]
    # With the word "close" we can terminate our scraper.
    if command == "close":
        break
    # Map the command word to the corresponding plotting function;
    # unknown words are ignored, matching the original if/elif chain.
    plotters = {
        "simple": scraper.simple,
        "regression": scraper.regression,
        "detailed": scraper.full_graph,
    }
    plot = plotters.get(command)
    if plot is None:
        continue
    # Pair up to three positional words with the keyword names t, p1, p2;
    # omitted ones let the functions' standard defaults apply.
    kwargs = dict(zip(("t", "p1", "p2"), params[:3]))
    plot(**kwargs)