Commit

Add files via upload

Developer-14 authored Feb 22, 2022
1 parent c6f20c2 commit 85e4326
Showing 27 changed files with 1,352 additions and 0 deletions.
102 changes: 102 additions & 0 deletions BuyingModule/celery_task.py
@@ -0,0 +1,102 @@
import os
from datetime import datetime

import pytz
from celery import Celery
from celery.schedules import crontab
from dotenv import load_dotenv

from DaoModule.Dao import Dao
from EmailModule.sender import EmailSender
from TimeModule.TimeModule import TimeModule
from TokenManagementModule.Receiver import Receiver
from main import buy_check

load_dotenv()
broker = os.getenv('BROKER')

app = Celery(
    'simple_worker',
    broker=broker,
    backend='db+sqlite:///results.db'
)


@app.task(name='check_nft_data')
def check_nft_data():
    # Queue a scrape task for every known drop id.
    token_ids = Dao.get_drop_ids()
    for token in token_ids:
        scrap_token.apply_async(eta=datetime.now(pytz.utc),
                                kwargs={
                                    "token": token,
                                })
    return True


# Run check_nft_data every day at 00:30 UTC.
app.conf.beat_schedule = {
    'checking_the_nft_data': {
        'task': 'check_nft_data',
        'schedule': crontab(minute=30, hour=0),
        # 'args': (16, 16)
    },
}
app.conf.timezone = 'UTC'


@app.task
def scrap_token(**kwargs):
    # Scrape the data for a single drop id; any failure is e-mailed.
    try:
        token = kwargs['token']
        receiver = Receiver()
        receiver.scrape_token_info(token)
    except Exception as err:
        EmailSender(err, 'Error', '[email protected]')


@app.task
def buy_nft(**kwargs):
    # Attempt the purchase via buy_check, e-mail the result, and record the outcome.
    try:
        token = kwargs['token']
        credentials = kwargs['credentials']
        task_id = kwargs['task_id']
        to_name = kwargs['to_name']
        print(token)
        print(credentials)
        print(task_id)
        response_status, response_message = buy_check(
            login=credentials['login'],
            password=credentials['password'],
            drop_id=token['drop_id'],
            to_name=to_name,
            referer='atomichub',
            country='US',
            delphi_median=0,
            buying_time=token['start_time'],  # or TimeModule.return_current_unix_time(), or TimeModule.return_current_unix_time() + 50000
            currency=token['currency']
        )
        print(response_status)
        print(response_message)
        EmailSender(response_message,
                    f'Drop #{token["drop_id"]}: {response_status}, {datetime.now().strftime("%Y-%m-%d %H:%M:%S")}',
                    '[email protected]'
                    )
        if response_status == 'Error':
            Dao.update_task(task_id, datetime.now().strftime('%Y-%m-%d %H:%M:%S'), 'Error', response_message)
        else:
            Dao.update_task(task_id, datetime.now().strftime('%Y-%m-%d %H:%M:%S'), 'Success', "")
    except Exception as err:
        EmailSender(err, 'Error', '[email protected]')


@app.task
def receive_task(**kwargs):
    try:
        login = kwargs['login']
        password = kwargs['password']
        drops = kwargs['drops']
        to_name = kwargs['to_name']
        receiver = Receiver()
        receiver.receive_data([{"login": login, "password": password}], drops, buy_nft, to_name)
    except Exception as err:
        print(err)
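For a quick manual check of the task chain (a sketch; it assumes the broker configured in BROKER is reachable and a Celery worker is running), the tasks can be enqueued directly:

from BuyingModule.celery_task import check_nft_data, scrap_token

# Kick off the nightly scan immediately instead of waiting for the 00:30 UTC beat.
check_nft_data.delay()

# Or scrape a single drop directly; 123456 is a placeholder drop id.
scrap_token.apply_async(kwargs={"token": 123456})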
9 changes: 9 additions & 0 deletions BuyingModule/scheduler.py
@@ -0,0 +1,9 @@
from datetime import datetime, timedelta

import pytz

from BuyingModule.celery_task import buy_nft

# Manual check: enqueue buy_nft to run 15 seconds from now.
print(datetime.now(pytz.utc))
buy_nft.apply_async(eta=datetime.now(pytz.utc) + timedelta(seconds=15), kwargs={"executor": 'run'})
126 changes: 126 additions & 0 deletions DaoModule/Dao.py
@@ -0,0 +1,126 @@
import os
from datetime import datetime
from dateutil.parser import parse

import requests
from dotenv import load_dotenv

load_dotenv()
flask_url = os.getenv('FLASK_HOST')

class Dao:

    @staticmethod
    def get_median():
        response = requests.get(flask_url + "median/")
        response_json = response.json()
        median = response_json['result'][0]['price']
        return median

    @staticmethod
    def get_tokens():
        response = requests.get(flask_url + "tokens/")
        response_json = response.json()
        return response_json['result']

    @staticmethod
    def add_account(login, password):
        r = requests.post(flask_url + 'add-account/', json={
            "login": login,
            "password": password,
        })
        return r.json()['account']

    @staticmethod
    def add_task(account_id, token_id, added_time, perform_time, status, error):
        r = requests.post(flask_url + 'add-task/', json={
            "account_id": account_id,
            "token_id": token_id,
            "added_time": added_time,
            "perform_time": perform_time,
            "status": status,
            "error": error,
        })
        return r.json()['task']

    @staticmethod
    def update_task(task_id, perform_time, status, error):
        r = requests.put(flask_url + 'update-task/', json={
            "task_id": task_id,
            "perform_time": perform_time,
            "status": status,
            "error": error,
        })
        return r.json()["success"]

    @staticmethod
    def add_token(drop_id, collection_name, price, currency, settlement_symbol, price_recipient,
                  fee_rate, auth_required, account_limit, account_limit_cooldown, max_claimable,
                  current_claimed, start_time, end_time):
        r = requests.post(flask_url + 'add-token/', json={
            "drop_id": drop_id,
            "collection_name": collection_name,
            "price": price,
            "currency": currency,
            "settlement_symbol": settlement_symbol,
            "price_recipient": price_recipient,
            "fee_rate": fee_rate,
            "auth_required": auth_required,
            "account_limit": account_limit,
            "account_limit_cooldown": account_limit_cooldown,
            "max_claimable": max_claimable,
            "current_claimed": current_claimed,
            "start_time": datetime.utcfromtimestamp(start_time).strftime('%Y-%m-%d %H:%M:%S'),
            "end_time": datetime.utcfromtimestamp(end_time).strftime('%Y-%m-%d %H:%M:%S'),
            "scraped_time": datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        })
        return r.json()["token"]

    @staticmethod
    def get_history_data(token_id, number_of_days):
        r = requests.post(flask_url + 'plot-data/', json={
            "token_id": token_id,
            "number_of_days": number_of_days
        })
        print(f"Status Code: {r.status_code}, Response: {r}")
        return r.json()["result"][::-1]

    @staticmethod
    def get_clean_historical_data(drop_id, number_of_days=30):
        data = Dao.get_history_data(drop_id, number_of_days)
        print(data)
        labels = []
        values = []
        for elem in data:
            labels.append(parse(elem['add_time']).strftime('%m-%d-%Y'))
            values.append(elem['price'])
        return labels, values

    @staticmethod
    def get_drop_ids():
        # No leading slash, for consistency with the other endpoints
        # (flask_url is expected to end with a slash).
        r = requests.get(flask_url + 'get-nft-ids/')
        token_ids = []
        for token in r.json()['result']:
            token_ids.append(token['drop_id'])
        return token_ids



if __name__ == '__main__':
    # print(Dao.add_token(
    #     drop_id=111,
    #     collection_name='TEST',
    #     price=112.2,
    #     currency='WAX',
    #     settlement_symbol='wax',
    #     price_recipient='myself',
    #     fee_rate=0.222,
    #     auth_required=0,
    #     account_limit=1,
    #     account_limit_cooldown=2,
    #     max_claimable=100,
    #     current_claimed=10,
    #     start_time=None,
    #     end_time='2021-12-03 17:12:00.500000'
    # ))
    print(Dao.get_history_data(1, 30))
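A usage sketch for the task-tracking helpers (assumes the Flask API behind FLASK_HOST is running; the return values of add-account/ and add-task/ are assumed to be plain ids, and the credentials and timestamps are placeholders):

# Assumed: add-account/ returns an account id and add-task/ a task id.
account_id = Dao.add_account('wax_login', 'wax_password')
task_id = Dao.add_task(account_id=account_id, token_id=1,
                       added_time='2022-02-22 12:00:00',
                       perform_time='2022-02-22 12:30:00',
                       status='Pending', error='')
Dao.update_task(task_id, '2022-02-22 12:31:00', 'Success', '')

# Plot-ready history for drop 1 over the last 30 days.
labels, values = Dao.get_clean_historical_data(1, number_of_days=30)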
18 changes: 18 additions & 0 deletions Dockerfile
@@ -0,0 +1,18 @@
FROM joyzoursky/python-chromedriver:3.8

RUN apt-get update
RUN pip install --upgrade pip

COPY ./requirements.txt /app/requirements.txt

WORKDIR /app

ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1

RUN pip install -r requirements.txt

COPY . .

# Use ENV (not RUN export) so PYTHONPATH persists in the final image.
ENV PYTHONPATH /app

32 changes: 32 additions & 0 deletions EmailModule/sender.py
@@ -0,0 +1,32 @@
import os

from dotenv import load_dotenv
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail


class EmailSender:
    def __init__(self, body=None, subject=None, to_email=None):
        load_dotenv()
        self.from_email = os.getenv('SENDGRID_SENDER')
        if body and subject and to_email:
            self.send_message(body, subject, to_email)

    def send_message(self, body, subject, to_email):
        message = Mail(
            from_email=self.from_email,
            to_emails=to_email,
            subject=f'{subject}',
            html_content=f'<p>{body}</p>')
        try:
            sg = SendGridAPIClient(os.getenv('SENDGRID_API_KEY'))
            response = sg.send(message)
            print(response.status_code)
            print(response.body)
            print(response.headers)
        except Exception as e:
            # Not every exception exposes a .body attribute, so fall back to the exception itself.
            print(getattr(e, 'body', e))


if __name__ == '__main__':
    EmailSender('body', 'subject', '[email protected]')
5 changes: 5 additions & 0 deletions README
@@ -0,0 +1,5 @@
Run the Celery worker and the beat scheduler:
celery -A BuyingModule.celery_task worker --loglevel=info
celery -A BuyingModule.celery_task beat --loglevel=info

Or bring everything up with Docker Compose:
docker-compose up --build -d
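
Environment variables read from .env (the broker URL below is only an example):
BROKER            Celery broker URL, e.g. redis://localhost:6379/0
FLASK_HOST        Base URL of the Flask API used by DaoModule (paths are appended directly, so include a trailing slash)
SENDGRID_API_KEY  SendGrid API key used by EmailModule
SENDGRID_SENDER   Verified sender address for SendGrid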
63 changes: 63 additions & 0 deletions ScraperModule/AtomicHub.py
@@ -0,0 +1,63 @@
import time

import pandas as pd
import requests

from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait

from ScraperModule.WaxCloud import login_wax_cloud
from ScraperModule.SeleniumService.selenium_executor import SeleniumExecutor


class AtomicHub:
    def __init__(self, selenium_executor, login_wax_cloud):
        self.selenium_executor = selenium_executor
        self.driver = selenium_executor.get_driver()
        self.login_wax_cloud = login_wax_cloud

    def login_to_atomichub(self, wax_login, wax_password):
        # https://wax.atomichub.io/ login
        selenium_executor = SeleniumExecutor()
        selenium_executor.open_url(self.driver, 'https://wax.atomichub.io/')
        WebDriverWait(self.driver, 20).until(expected_conditions.presence_of_element_located(
            (By.XPATH, '/html/body/div[3]/div/div/div/div[2]/button[1]')))
        accept_cookie_button = self.driver.find_element(By.XPATH, '/html/body/div[3]/div/div/div/div[2]/button[1]')
        accept_cookie_button.click()
        WebDriverWait(self.driver, 20).until(
            expected_conditions.presence_of_element_located((By.XPATH, '//*[@id="root"]/nav/div/div[4]/div/button')))
        login_button = self.driver.find_element(By.XPATH, '//*[@id="root"]/nav/div/div[4]/div/button')
        login_button.click()
        WebDriverWait(self.driver, 20).until(
            expected_conditions.presence_of_element_located(
                (By.XPATH, '/html/body/div[3]/div/div/div[2]/div/div[1]/div[1]/div[1]/button')))
        wax_button = self.driver.find_element(By.XPATH,
                                              '/html/body/div[3]/div/div/div[2]/div/div[1]/div[1]/div[1]/button')
        wax_button.click()
        time.sleep(15)
        handles = self.driver.window_handles
        self.driver.switch_to.window(handles[1])
        self.login_wax_cloud(self.driver, wax_login, wax_password)
        self.driver.switch_to.window(handles[0])
        print(self.driver.title)

        return self.driver

    def get_token_info(self, drop_id):
        self.selenium_executor.open_url(self.driver, f'https://wax.atomichub.io/drops/{drop_id}')
        table_xpath = '//*[@id="root"]/div[2]/div/div[2]/div/div/div/div[1]/div[3]/div/table'
        WebDriverWait(self.driver, 20).until(expected_conditions.presence_of_element_located(
            (By.XPATH, table_xpath)))
        html_table = self.driver.find_element(By.XPATH, table_xpath)
        df = pd.read_html(html_table.get_attribute('outerHTML'))[0]
        info_dict = {}
        info_dict["Name"] = df[1][0].replace('#', '')
        info_dict["Seller"] = df[1][1]
        info_dict[df[0][3]] = df[1][3]
        return info_dict

    @staticmethod
    def get_token_api_info(drop_id):
        # Note: the template id in the URL is currently hard-coded; drop_id is not used yet.
        response = requests.get("https://wax.api.atomicassets.io/atomicassets/v1/templates/farmersworld/260616")
        return response.json()
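A usage sketch (assumes valid WAX Cloud Wallet credentials, that SeleniumExecutor picks up the bundled chromedriver, and that the drop id below is a placeholder):

from ScraperModule.AtomicHub import AtomicHub
from ScraperModule.WaxCloud import login_wax_cloud
from ScraperModule.SeleniumService.selenium_executor import SeleniumExecutor

executor = SeleniumExecutor()
hub = AtomicHub(executor, login_wax_cloud)
driver = hub.login_to_atomichub('my_wax_login', 'my_wax_password')
print(hub.get_token_info(111111))  # placeholder drop id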
Binary file added ScraperModule/SeleniumService/driver/chromedriver