
Team catapult #12

Open · wants to merge 1 commit into base: main
28 changes: 4 additions & 24 deletions README.md
@@ -1,26 +1,6 @@
# Empowering-Investors-Hackathon
# SkepTICK
Your financial analysis plugin

## Submission Instruction:
1. Fork this repository
2. Create a folder with your Team Name
3. Upload all the code and necessary files in the created folder
4. Upload a **README.md** file in your folder with the below-mentioned information.
5. Generate a Pull Request with your Team Name. (Example: submission-XYZ_team)

## README.md must consist of the following information:

#### Team Name -
#### Problem Statement -
#### Team Leader Email -

## A Brief of the Prototype:
This section must include UML Diagrams and prototype description

## Tech Stack:
List Down all technologies used to Build the prototype

## Step-by-Step Code Execution Instructions:
This Section must contain a set of instructions required to clone and run the prototype so that it can be tested and deeply analyzed

## What I Learned:
Write about the biggest learning you had while developing the prototype
# Backend service
- `uvicorn main:app --reload`
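
Once the service is started with the command above, a quick smoke test might look like the sketch below. It is not part of the PR: it assumes that `main.py` (which is not included in this diff) mounts the routers added under `app/api/`, and that uvicorn is listening on its default port 8000.

```python
# Hedged smoke test: assumes `uvicorn main:app --reload` is running locally
# and that main.py wires up root_router (main.py is not shown in this PR).
import requests

base = "http://127.0.0.1:8000"
print(requests.get(f"{base}/").json())        # expected: {"About": "Hackathon API"}
print(requests.get(f"{base}/health").json())  # expected: {"status": "ok"}
```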
Binary file added __pycache__/main.cpython-310.pyc
Binary file not shown.
Binary file added app/__pycache__/helper.cpython-310.pyc
Binary file not shown.
Binary file added app/agents/__pycache__/agent.cpython-310.pyc
Binary file not shown.
Binary file added app/agents/__pycache__/tools.cpython-310.pyc
Binary file not shown.
73 changes: 73 additions & 0 deletions app/agents/agent.py
@@ -0,0 +1,73 @@
"""here we define our agent class and their actions with tools
"""

from app.agents.tools import Tools
from langchain.agents import AgentType
from langchain.agents import AgentExecutor # will be used for custom agents
from langchain.agents import initialize_agent
from langchain.callbacks.base import BaseCallbackHandler
from typing import Dict, Union, Any, List
from langchain.schema import AgentAction
import streamlit as st
import time


class MyCustomHandlerOne(BaseCallbackHandler):

def __init__(self):
self.flag=True

def on_llm_start(
self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
) -> Any:
# DUMMY OUTPUT for demo
print(f"on_llm_start {serialized['name']}")


def on_llm_new_token(self, token: str, **kwargs: Any) -> Any:
print(f"on_new_token {token}")

def on_llm_error(
self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
) -> Any:
"""Run when LLM errors."""

def on_chain_start(
self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any
) -> Any:
print(f"on_chain_start {serialized['name']}")

def on_tool_start(
self, serialized: Dict[str, Any], input_str: str, **kwargs: Any
) -> Any:
print(f"on_tool_start {serialized['name']}")
st.write(f"Identified tool {serialized['name']}")

def on_agent_action(self, action: AgentAction, **kwargs: Any) -> Any:
print(f"on_agent_action {action}")

class ActionAgent():

def __init__(self,llm):
self.llm = llm
self.agent = self.create_agent()

def create_agent(self):
"""
Returns: AgentExecutor

        AgentType -> Zero-shot means the agent acts on the current input only; it has no memory.
        ReAct -> Reasoning and Acting steps that the LLM can cycle through,
        enabling a multi-step process for identifying answers.
"""
agent_executor = initialize_agent(self.fetch_tools(), self.llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True,max_iterations=3)
return agent_executor

def fetch_tools(self):
self.tools= Tools(self.llm).list_tools()
return self.tools

def run(self, input):
handler1 = MyCustomHandlerOne()
#return self.agent.run(input=input,callbacks=[handler1])
return self.agent.run(input=input)
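
For reference, a minimal sketch of how `ActionAgent` might be driven end to end. The model choice and the question are illustrative, and `OPENAI_API_KEY` plus `SERPAPI_API_KEY` are assumed to be set in the environment.

```python
# Illustrative usage only: any LangChain chat model could be passed in; the question is made up.
from langchain.chat_models import ChatOpenAI
from app.agents.agent import ActionAgent

llm = ChatOpenAI(temperature=0)   # assumes OPENAI_API_KEY is set
agent = ActionAgent(llm)          # builds the zero-shot ReAct executor over the Tools list
answer = agent.run("Is GUJTHEM.BO worth buying right now?")  # capped at 3 ReAct iterations
print(answer)
```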
226 changes: 226 additions & 0 deletions app/agents/tools.py
@@ -0,0 +1,226 @@
"""here we define tools that are needed by an agent
"""
from langchain.agents import load_tools
from langchain.agents.tools import Tool
from langchain.utilities import PythonREPL
from pydantic import BaseModel, Field
from langchain import LLMMathChain,SerpAPIWrapper
from langchain.tools import ShellTool
from langchain.chat_models import ChatOpenAI
from langchain.schema import (
HumanMessage,
SystemMessage
)
import json
import requests
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from typing import List
import streamlit as st
import yfinance as yf
from yahooquery import Ticker
import os
from dotenv import load_dotenv
import openai
from openai import ChatCompletion
from langchain.prompts import ChatPromptTemplate
from langchain.prompts.chat import SystemMessage, HumanMessagePromptTemplate

load_dotenv()
openai.api_key = os.getenv("OPENAI_API_KEY")

# Defining schema for llm math chain as an example
class CalculatorInput(BaseModel):
question: str = Field()

# class FhirObservationInput(BaseModel):
# input_data: List[str] = Field(description="should be a list, with hypothesis data and patient ID")

class FhirObservationInput(BaseModel):
input_data: str = Field(description="hypothesis data")

class CreatePatientSummaryInput(BaseModel):
pass



class Tools():

def __init__(self,llm) :
self.llm = llm

self.tools = [
Tool(
name="Terminal",
description="A terminal. Use this to execute shell commands on this MacOS machine. Input should be a valid shell command. For example, `ls`.",
func=self.shell_tool()
),
Tool(
name="python_repl",
description="A Python shell. Use this to execute python commands. Input should be a valid python command. If you want to see the output of a value, you should print it out with `print(...)`.",
func=self.python_repl()
),
Tool(
name="Search",
func=self.search_tool(),
description="useful for when you need to answer questions about current events, use it only to get TICKER data. Input should be the company name. For example, `Apple`.",
),
Tool(
name="Company news",
func=self.get_company_news,
description="Use this to get news about a company. Input should be the company name. For example, `Apple`.",
)
,
Tool(
name="Get Stock history",
func=self.get_stock_data,
description="Get historical market data for a company stock given ticker symbol, example input `AAPL`",


),
Tool(
name="Get Stock Analysis",
func=self.get_stock_analysis,
description="Get stock analysis for a company stock given ticker symbol, example input `AAPL`",
return_direct=True

)

]

def shell_tool(self):
shell_tool = ShellTool()

        # return the bound run method so the Tool wrapper can call it
return shell_tool.run

def python_repl(self):
python_repl = PythonREPL()

return python_repl.run

def search_tool(self):
"""Use this function to ONLY seach the web for getting TICKER SYMBOLS. Input should be the company name. For example, `Apple`."""
search = SerpAPIWrapper()
return search.run

def get_company_news(self,company_name):
"""Use this funciton ONLY to get company news. Input should be the company name. For example, `Apple`."""
params = {
"engine": "google",
"tbm": "nws",
"q": company_name,
"api_key": os.environ["SERPAPI_API_KEY"],
}

response = requests.get('https://serpapi.com/search', params=params)
data = response.json()

filename="app/assets/investment.txt"
news = data.get('news_results')

if news:
self.write_news_to_file(news, filename)

else:
print("No news found.")



def write_news_to_file(self,news, filename):
with open(filename, 'w') as file:
for news_item in news:
if news_item is not None:
title = news_item.get('title', 'No title')
link = news_item.get('link', 'No link')
date = news_item.get('date', 'No date')
file.write(f"Title: {title}\n")
file.write(f"Link: {link}\n")
file.write(f"Date: {date}\n\n")


    def get_stock_evolution(self, company_name, period="1y"):
        # Get the stock information for the requested ticker
        stock = yf.Ticker(company_name)

# Get historical market data
hist = stock.history(period=period)

# Convert the DataFrame to a string with a specific format
data_string = hist.to_string()

# Append the string to the "investment.txt" file
with open("app/assets/investment.txt", "a") as file:
file.write(f"\nStock Evolution for {company_name}:\n")
file.write(data_string)
file.write("\n")

return hist


    def get_financial_statements(self, ticker):
        # Create a Ticker object for the requested symbol
        company = Ticker(ticker)

# Get financial data
balance_sheet = company.balance_sheet().to_string()
cash_flow = company.cash_flow(trailing=False).to_string()
income_statement = company.income_statement().to_string()
valuation_measures = str(company.valuation_measures) # This one might already be a dictionary or string

# Write data to file
with open("app/assets/investment.txt", "a") as file:
file.write("\nBalance Sheet\n")
file.write(balance_sheet)
file.write("\nCash Flow\n")
file.write(cash_flow)
file.write("\nIncome Statement\n")
file.write(income_statement)
file.write("\nValuation Measures\n")
file.write(valuation_measures)


    def get_stock_data(self, company_ticker):
        hist = self.get_stock_evolution(company_ticker)
self.get_financial_statements(company_ticker)
return hist

def get_stock_analysis(self,company_ticker):
with open("app/assets/investment.txt", "r") as file:
content = file.read()[:14000]
chat = ChatOpenAI(temperature=0, model_name='gpt-4', request_timeout=120)

template = ChatPromptTemplate.from_messages(
[
SystemMessage(
content=(
"""Write a detailed investment thesis to answer
the user request. Provide numbers to justify
your assertions, a lot ideally. Always provide
a recommendation to buy the stock of the company
or not given the information available.
Never mention something like this:
However, it is essential to consider your own risk
tolerance, financial goals, and time horizon before
making any investment decisions. It is recommended
to consult with a financial advisor or do further
research to gain more insights into the company's
fundamentals and market trends. The user already knows that"""
)
),
HumanMessagePromptTemplate.from_template("This is ticker symbol {ticker},this is news {news}"),
]
)

response = chat(template.format_messages(ticker=company_ticker, news=content))

return response.content

def list_tools(self):
return self.tools
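
The tools can also be exercised outside the agent loop. Below is a sketch of the intended data-gathering and analysis flow, assuming `SERPAPI_API_KEY` and `OPENAI_API_KEY` are set and the `app/assets/` directory exists; the company name and ticker are illustrative.

```python
# Sketch of the gather-then-analyse flow that the agent normally drives via its tools.
from langchain.chat_models import ChatOpenAI
from app.agents.tools import Tools

tools = Tools(ChatOpenAI(temperature=0))
tools.get_company_news("Gujarat Themis Biosyn")  # writes headlines to app/assets/investment.txt
tools.get_stock_data("GUJTHEM.BO")               # appends price history and financial statements
print(tools.get_stock_analysis("GUJTHEM.BO"))    # GPT-4 investment thesis built from that file
```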

Binary file added app/api/__pycache__/root.cpython-310.pyc
Binary file not shown.
14 changes: 14 additions & 0 deletions app/api/root.py
@@ -0,0 +1,14 @@
from fastapi import APIRouter

root_router = APIRouter()


@root_router.get("/")
def read_root():
return {"About": "Hackathon API"}


@root_router.get("/health")
def get_health():
"""To check the service health"""
return {"status": "ok"}
Empty file added app/api/v1/__init__.py
Empty file.
Binary file added app/api/v1/__pycache__/__init__.cpython-310.pyc
Binary file not shown.
Binary file added app/api/v1/__pycache__/api.cpython-310.pyc
Binary file not shown.
11 changes: 11 additions & 0 deletions app/api/v1/api.py
@@ -0,0 +1,11 @@
from fastapi import APIRouter

from app.api.v1.endpoints import transcribe
from app.api.v1.endpoints import stock_tips
from app.api.v1.endpoints import wholetruth

v1_router = APIRouter()

v1_router.include_router(transcribe.router, prefix="/v1")
v1_router.include_router(stock_tips.router, prefix="/v1")
v1_router.include_router(wholetruth.router, prefix="/v1")
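
`main.py` itself is not part of this diff; the following is a plausible sketch of how the two routers could be wired so that `uvicorn main:app --reload` works as the README describes. The app title is assumed.

```python
# Hypothetical main.py: the diff only shows the routers, so this wiring is an assumption.
from fastapi import FastAPI

from app.api.root import root_router
from app.api.v1.api import v1_router

app = FastAPI(title="SkepTICK backend")

app.include_router(root_router)   # "/" and "/health"
app.include_router(v1_router)     # "/v1/..." endpoints: transcribe, stock_tips, wholetruth
```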
Binary file not shown.
Binary file not shown.
Binary file not shown.