-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathmain.py
103 lines (87 loc) · 3.68 KB
/
main.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
import os
from crewai import Agent, Task, Crew, Process
from textwrap import dedent
from tools.scraper_tools import ScraperTool
# Fail fast with a clear error if the OpenAI API key is not configured.
# NOTE(review): crewai presumably reads OPENAI_API_KEY straight from the
# environment; the original also wrote the value back into os.environ, but
# os.getenv had just read it from os.environ, so that assignment was a no-op
# and has been dropped.
openai_api_key = os.getenv("OPENAI_API_KEY")
if not openai_api_key:
    raise ValueError("Please set the OPENAI_API_KEY environment variable.")
#### CURRENT TOOLS AVAILABLE WITHIN CREW AI
##---------------------------------------##
#### SerperDevTool: Google Serper Search
#### ScrapeWebsiteTool: Scrape Website
#### DirectoryReadTool: Directory Read
#### FileReadTool: File Read
#### SeleniumScrapingTool: Selenium Scraper
#### DirectorySearchTool: Directory RAG Search
#### PDFSearchTool: PDF RAG Search
#### TXTSearchTool: TXT RAG Search
#### CSVSearchTool: CSV RAG Search
#### XMLSearchTool: XML RAG Search
#### JSONSearchTool: JSON RAG Search
#### DOCXSearchTool: DOCX RAG Search
#### MDXSearchTool: MDX RAG Search
#### PGSearchTool: PG RAG Search
#### WebsiteSearchTool: Website RAG Search
#### GitHubSearchTool: Github RAG Search
#### YoutubeVideoSearchTool: Youtube Video RAG Search
#### YoutubeChannelSearchTool: Youtube Channel RAG Search
scrape_tool = ScraperTool().scrape
# Define your agents with roles and goals
class NewsletterCrew:
    """Two-agent crew that scrapes a set of URLs and writes a short summary.

    A "scraper" agent pulls text content from the user-supplied URLs, then a
    "writer" agent condenses it into short-form copy. Tasks run sequentially.
    """

    def __init__(self, urls):
        # Raw user input identifying the page(s) to scrape; interpolated
        # verbatim into the scraper task's description.
        self.urls = urls

    def run(self):
        """Build the agents and tasks, execute the crew, and return its output.

        Returns:
            The result of ``Crew.kickoff()`` so callers can display it.
        """
        scraper = Agent(
            role="Summarizer of Websites",
            goal="Ask the user for a list of URLs, then use the WebsiteSearchTool to then scrape the content, and provide the full content to the writer agent so it can then be summarized",
            backstory="""You work at a leading tech think tank.
            Your expertise is taking URLs and getting just the text-based content of them.""",
            verbose=True,
            allow_delegation=False,
            tools=[scrape_tool],
        )
        writer = Agent(
            role="Tech Content Summarizer and Writer",
            goal="Craft compelling short-form content on AI advancements based on long-form text passed to you",
            backstory="""You are a renowned Content Creator, known for your insightful and engaging articles.
            You transform complex concepts into compelling narratives.""",
            verbose=True,
            allow_delegation=True,
        )

        # Tasks execute in the order listed (sequential process).
        task1 = Task(
            description=f"""Take a list of websites that contain AI content, read/scrape the content and then pass it to the writer agent
            here are the URLs from the user that you need to scrape: {self.urls}""",
            agent=scraper,
            expected_output="A string or object that represents what the task is expected to produce",
        )
        task2 = Task(
            description="""Using the text provided by the scraper agent, develop a short and compelling/interesting short-form summary of the
            text provided to you about AI""",
            agent=writer,
            expected_output="A string or object that represents what the task is expected to produce",
        )

        # FIX: the original bound this Crew to a local variable that shadowed
        # the class name itself, and never returned kickoff()'s result — so
        # run() returned None and the caller printed "None".
        crew = Crew(
            agents=[scraper, writer],
            tasks=[task1, task2],
            verbose=2,  # logging level; 1 or 2
        )
        return crew.kickoff()
# Script entry point: prompt for URL(s), run the crew, and show its output.
if __name__ == "__main__":
    print("## Welcome to Newsletter Writer")
    print("-------------------------------")

    # dedent lets the prompt be written indented here while the user sees it
    # flush-left.
    prompt = dedent(
        """
        What is the URL you want to summarize?
        """
    )
    urls = input(prompt)

    crew = NewsletterCrew(urls)
    result = crew.run()

    print("\n\n########################")
    print("## Here is the Result")
    print("########################\n")
    print(result)