
Commit

chore: Update pip installation command and requirements, add new dependencies
unclecode committed May 17, 2024
1 parent a317dc5 commit bf3b040
Showing 1 changed file with 57 additions and 21 deletions.
setup.py (57 additions, 21 deletions)
@@ -1,24 +1,12 @@
 from setuptools import setup, find_packages
-from setuptools.command.install import install as _install
-import subprocess
-import sys
-
-class InstallCommand(_install):
-    def run(self):
-        # Run the standard install first
-        _install.run(self)
-        # Now handle the dependencies manually
-        self.manual_dependencies_install()
-
-    def manual_dependencies_install(self):
-        with open('requirements.txt') as f:
-            dependencies = f.read().splitlines()
-        for dependency in dependencies:
-            subprocess.check_call([sys.executable, '-m', 'pip', 'install', dependency])
+
+# Read the requirements from requirements.txt
+with open("requirements.txt") as f:
+    requirements = f.read().splitlines()
 
 setup(
     name="Crawl4AI",
-    version="0.1.0",
+    version="0.1.2",
     description="🔥🕷️ Crawl4AI: Open-source LLM Friendly Web Crawler & Scrapper",
     long_description=open("README.md").read(),
     long_description_content_type="text/markdown",
@@ -27,10 +15,7 @@ def manual_dependencies_install(self):
     author_email="[email protected]",
     license="MIT",
     packages=find_packages(),
-    install_requires=[], # Leave this empty to avoid default dependency resolution
-    cmdclass={
-        'install': InstallCommand,
-    },
+    install_requires=requirements,
     entry_points={
         'console_scripts': [
             'crawl4ai-download-models=crawl4ai.model_loader:main',
@@ -48,3 +33,54 @@ def manual_dependencies_install(self):
     ],
     python_requires=">=3.7",
 )
+
+from setuptools import setup, find_packages
+from setuptools.command.install import install as _install
+import subprocess
+import sys
+
+class InstallCommand(_install):
+    def run(self):
+        # Run the standard install first
+        _install.run(self)
+        # Now handle the dependencies manually
+        self.manual_dependencies_install()
+
+    def manual_dependencies_install(self):
+        with open('requirements.txt') as f:
+            dependencies = f.read().splitlines()
+        for dependency in dependencies:
+            subprocess.check_call([sys.executable, '-m', 'pip', 'install', dependency])
+
+# setup(
+# name="Crawl4AI",
+# version="0.1.0",
+# description="🔥🕷️ Crawl4AI: Open-source LLM Friendly Web Crawler & Scrapper",
+# long_description=open("README.md").read(),
+# long_description_content_type="text/markdown",
+# url="https://github.com/unclecode/crawl4ai",
+# author="Unclecode",
+# author_email="[email protected]",
+# license="MIT",
+# packages=find_packages(),
+# install_requires=[], # Leave this empty to avoid default dependency resolution
+# cmdclass={
+# 'install': InstallCommand,
+# },
+# entry_points={
+# 'console_scripts': [
+# 'crawl4ai-download-models=crawl4ai.model_loader:main',
+# ],
+# },
+# classifiers=[
+# "Development Status :: 3 - Alpha",
+# "Intended Audience :: Developers",
+# "License :: OSI Approved :: Apache Software License",
+# "Programming Language :: Python :: 3",
+# "Programming Language :: Python :: 3.7",
+# "Programming Language :: Python :: 3.8",
+# "Programming Language :: Python :: 3.9",
+# "Programming Language :: Python :: 3.10",
+# ],
+# python_requires=">=3.7",
+# )

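Note on the console_scripts entry point that this commit keeps: at install time pip generates a crawl4ai-download-models wrapper script that imports crawl4ai.model_loader and calls its main() function. The following minimal sketch (not part of the commit) shows the equivalent call, assuming the module exposes a main() callable exactly as the declaration 'crawl4ai-download-models=crawl4ai.model_loader:main' states:

# Rough equivalent of the generated crawl4ai-download-models console script.
# Assumes crawl4ai.model_loader provides a main() callable, as declared in
# entry_points above; this is an illustrative sketch, not code from the commit.
from crawl4ai.model_loader import main

if __name__ == "__main__":
    main()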