Compare commits


No commits in common. "master" and "v0.4.12" have entirely different histories.

31 changed files with 366 additions and 3269 deletions

.github/workflows/flake8.yml

@@ -1,32 +1,18 @@
 name: flake8
-concurrency:
-  group: ${{ github.ref }}
-  cancel-in-progress: true
-on:
-  workflow_dispatch:
-  push:
-    tags:
-      - "*"
-    branches:
-      - main
-      - master
-      - develop
-      - "release/*"
-  pull_request:
+on: pull_request

 jobs:
   flake8-lint:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-latest
     name: Lint
     steps:
       - name: Check out source repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
       - name: Set up Python environment
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v2
         with:
-          python-version: "3.13"
+          python-version: "3.9"
       - name: flake8 Lint
         uses: reviewdog/action-flake8@v3
         with:

.github/workflows/tests.yml

@@ -1,65 +1,50 @@
 name: Tests and Codecov
-on:
-  push:
-    branches:
-      - master
-      - main
-      - "release/*"
-  pull_request:
-  workflow_dispatch:
+on: pull_request

 jobs:
   run_tests:
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-latest
     strategy:
       fail-fast: false
       matrix:
-        python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', 'pypy-3.10']
+        python-version: [3.7, 3.8, 3.9, pypy-3.7]

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v2
         with:
           python-version: ${{ matrix.python-version }}

       - name: Install dependencies
         run: |
-          python -m pip install uv
-          uv pip install --system tox tox-gh-actions
+          python -m pip install --upgrade pip
+          pip install tox tox-gh-actions

       - name: Test with tox
         run: tox

   coverage_report:
     needs: run_tests
-    runs-on: ubuntu-24.04
+    runs-on: ubuntu-latest

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
-
-      - name: Set up Python 3.13
-        uses: actions/setup-python@v5
-        with:
-          python-version: 3.13
+        uses: actions/checkout@v2

       - name: Install dependencies
         run: |
-          python -m pip install uv
-          uv pip install --system poetry
-          uv pip install --system .[dev]
+          python -m pip install --upgrade pip
+          pip install coverage docopt yarg requests

       - name: Calculate coverage
-        run: poetry run coverage run --source=pipreqs -m unittest discover
+        run: coverage run --source=pipreqs -m unittest discover

       - name: Create XML report
-        run: poetry run coverage xml
+        run: coverage xml

       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v2
         with:
           files: coverage.xml
-          token: ${{ secrets.CODECOV_TOKEN }}
-          fail_ci_if_error: false
+          fail_ci_if_error: true

.pre-commit-config.yaml (deleted)

@@ -1,96 +0,0 @@
ci:
  autoupdate_commit_msg: "chore: update pre-commit hooks"
  autofix_commit_msg: "style: pre-commit fixes"
  autoupdate_schedule: quarterly

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: check-added-large-files
        args: [ '--maxkb=1000' ]
      - id: check-case-conflict
      - id: check-merge-conflict
      - id: check-symlinks
      - id: check-yaml
      - id: check-toml
      - id: check-json
      - id: debug-statements
      - id: end-of-file-fixer
      - id: mixed-line-ending
      - id: requirements-txt-fixer
      - id: trailing-whitespace
        files: ".*\\.(?:tex|py)$"
        args: [ --markdown-linebreak-ext=md ]
        exclude: (^notebooks/|^tests/truth/)
      - id: detect-private-key
      - id: fix-byte-order-marker
      - id: check-ast
      - id: check-docstring-first
      - id: debug-statements

  - repo: https://github.com/pre-commit/pygrep-hooks
    rev: v1.10.0
    hooks:
      - id: python-use-type-annotations
      - id: python-check-mock-methods
      - id: python-no-eval
      - id: rst-backticks
      - id: rst-directive-colons

  - repo: https://github.com/asottile/pyupgrade
    rev: v3.3.1
    hooks:
      - id: pyupgrade
        args: [ --py38-plus ]

  # Notebook formatting
  - repo: https://github.com/nbQA-dev/nbQA
    rev: 1.9.1
    hooks:
      - id: nbqa-isort
        additional_dependencies: [ isort ]
      - id: nbqa-pyupgrade
        additional_dependencies: [ pyupgrade ]
        args: [ --py38-plus ]

  - repo: https://github.com/kynan/nbstripout
    rev: 0.8.1
    hooks:
      - id: nbstripout

  - repo: https://github.com/sondrelg/pep585-upgrade
    rev: 'v1.0'
    hooks:
      - id: upgrade-type-hints
        args: [ '--futures=true' ]

  - repo: https://github.com/MarcoGorelli/auto-walrus
    rev: 0.3.4
    hooks:
      - id: auto-walrus

  - repo: https://github.com/python-jsonschema/check-jsonschema
    rev: 0.30.0
    hooks:
      - id: check-github-workflows
      - id: check-github-actions
      - id: check-dependabot
      - id: check-readthedocs

  - repo: https://github.com/dannysepler/rm_unneeded_f_str
    rev: v0.2.0
    hooks:
      - id: rm-unneeded-f-str

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: "v0.8.6"
    hooks:
      - id: ruff
        types_or: [ python, pyi, jupyter ]
        args: [ --fix, --show-fixes, --line-length=120 ]  # --unsafe-fixes,
      # Run the formatter.
      - id: ruff-format
        types_or: [ python, pyi, jupyter ]

.python-version (deleted)

@@ -1,7 +0,0 @@
3.13
3.12
3.11
3.10
3.9
3.8
pypy3.9-7.3.12

.tool-versions (deleted)

@@ -1 +0,0 @@
python 3.13 3.12 3.11 3.10 3.9 3.8 pypy3.9-7.3.12

CONTRIBUTING.rst

@@ -61,11 +61,12 @@ Ready to contribute? Here's how to set up `pipreqs` for local development.
 2. Clone your fork locally::

     $ git clone git@github.com:your_name_here/pipreqs.git
+
+3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::
+
+    $ mkvirtualenv pipreqs
     $ cd pipreqs/
-
-3. Pipreqs is developed using Poetry. Refer to the `documentation <https://python-poetry.org/docs/>`_ to install Poetry in your local environment. Next, you should install pipreqs's dependencies::
-
-    $ poetry install --with dev
+    $ python setup.py develop

 4. Create a branch for local development::

@@ -75,11 +76,11 @@ Ready to contribute? Here's how to set up `pipreqs` for local development.
 5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox::

-    $ poetry run flake8 pipreqs tests
-    $ poetry run python -m unittest discover
-    $ poetry run tox
+    $ flake8 pipreqs tests
+    $ python setup.py test
+    $ tox

-   To test all versions of python using tox you need to have them installed and for this two options are recommended: `pyenv` or `asdf`.
+   To get flake8 and tox, just pip install them into your virtualenv.

 6. Commit your changes and push your branch to GitHub::

@@ -98,7 +99,7 @@ Before you submit a pull request, check that it meets these guidelines:
 2. If the pull request adds functionality, the docs should be updated. Put
    your new functionality into a function with a docstring, and add the
    feature to the list in README.rst.
-3. The pull request should work for currently supported Python and PyPy versions. Check
+3. The pull request should work for Python 3.7 to 3.11, and PyPy. Check
    https://travis-ci.org/bndr/pipreqs/pull_requests and make sure that the
    tests pass for all supported Python versions.

@@ -107,4 +108,4 @@ Tips

 To run a subset of tests::

-    $ poetry run python -m unittest tests.test_pipreqs
+    $ python -m unittest tests.test_pipreqs

MANIFEST.in (new file)

@@ -0,0 +1,13 @@
include AUTHORS.rst
include CONTRIBUTING.rst
include HISTORY.rst
include LICENSE
include README.rst
include pipreqs/stdlib
include pipreqs/mapping
recursive-include tests *
recursive-exclude * __pycache__
recursive-exclude * *.py[co]
recursive-include docs *.rst conf.py Makefile make.bat stdlib mapping

Makefile

@@ -6,14 +6,13 @@ help:
 	@echo "clean-pyc - remove Python file artifacts"
 	@echo "clean-test - remove test and coverage artifacts"
 	@echo "lint - check style with flake8"
-	@echo "test - run tests quickly using the default Python"
+	@echo "test - run tests quickly with the default Python"
 	@echo "test-all - run tests on every Python version with tox"
 	@echo "coverage - check code coverage quickly with the default Python"
 	@echo "docs - generate Sphinx HTML documentation, including API docs"
-	@echo "publish - package and upload a release"
-	@echo "publish-to-test - package and upload a release to test-pypi"
-	@echo "build - build the package"
-	@echo "install - install the dependencies into the Poetry virtual environment"
+	@echo "release - package and upload a release"
+	@echo "dist - package"
+	@echo "install - install the package to the active Python's site-packages"

 clean: clean-build clean-pyc clean-test

@@ -36,13 +35,14 @@ clean-test:
 	rm -fr htmlcov/

 lint:
-	poetry run flake8 pipreqs tests
+	flake8 pipreqs tests

 test:
-	poetry run python -m unittest discover
+	pip install -r requirements.txt
+	python setup.py test

 test-all:
-	poetry run tox
+	tox

 coverage:
 	coverage run --source pipreqs setup.py test

@@ -58,14 +58,13 @@ docs:
 	$(MAKE) -C docs html
 	open docs/_build/html/index.html

-publish: build
-	poetry publish
+release: clean
+	python setup.py sdist bdist_wheel upload -r pypi

-publish-to-test: build
-	poetry publish --repository test-pypi
+dist: clean
+	python setup.py sdist
+	python setup.py bdist_wheel
+	ls -l dist

-build: clean
-	poetry build

 install: clean
-	poetry install --with dev
+	python setup.py install

README.rst

@@ -2,8 +2,8 @@
 ``pipreqs`` - Generate requirements.txt file for any project based on imports
 =============================================================================

-.. image:: https://github.com/bndr/pipreqs/actions/workflows/tests.yml/badge.svg
-   :target: https://github.com/bndr/pipreqs/actions/workflows/tests.yml
+.. image:: https://img.shields.io/travis/bndr/pipreqs.svg
+   :target: https://travis-ci.org/bndr/pipreqs

 .. image:: https://img.shields.io/pypi/v/pipreqs.svg

@@ -21,18 +21,10 @@
 Installation
 ------------

-.. code-block:: sh
+::

     pip install pipreqs

-Obs.: if you don't want support for jupyter notebooks, you can install pipreqs without the dependencies that give support to it.
-To do so, run:
-
-.. code-block:: sh
-
-    pip install --no-deps pipreqs
-    pip install yarg==0.1.9 docopt==0.6.2
-
 Usage
 -----

@@ -55,7 +47,6 @@ Usage
     --debug Print debug information
     --ignore <dirs>... Ignore extra directories, each separated by a comma
     --no-follow-links Do not follow symbolic links in the project
-    --ignore-errors Ignore errors while scanning files
     --encoding <charset> Use encoding parameter for file open
     --savepath <file> Save the list of requirements in the given file
     --print Output the list of requirements in the standard output

@@ -66,7 +57,6 @@ Usage
     <compat> | e.g. Flask~=1.1.2
     <gt> | e.g. Flask>=1.1.2
     <no-pin> | e.g. Flask
-    --scan-notebooks Look for imports in jupyter notebook files.

 Example
 -------

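A note for readers skimming the README hunks above: the --mode flag only changes the comparison operator written next to a resolved version. A minimal, self-contained sketch of that mapping (plain Python; the helper name format_requirement is illustrative, not a pipreqs API):

    SYMBOLS = {"compat": "~=", "gt": ">=", "no-pin": ""}

    def format_requirement(name, version, mode):
        # no-pin (or a missing version) degrades to the bare package name
        symbol = SYMBOLS[mode]
        if not version or not symbol:
            return name
        return name + symbol + version

    assert format_requirement("Flask", "1.1.2", "compat") == "Flask~=1.1.2"
    assert format_requirement("Flask", "1.1.2", "gt") == "Flask>=1.1.2"
    assert format_requirement("Flask", "1.1.2", "no-pin") == "Flask"
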
pipreqs/__init__.py

@@ -1,3 +1,3 @@
 __author__ = 'Vadim Kravcenko'
 __email__ = 'vadim.kravcenko@gmail.com'
-__version__ = '0.4.13'
+__version__ = '0.4.11'

pipreqs/mapping

@@ -10,7 +10,6 @@ BeautifulSoupTests:BeautifulSoup
 BioSQL:biopython
 BuildbotStatusShields:BuildbotEightStatusShields
 ComputedAttribute:ExtensionClass
-constraint:python-constraint
 Crypto:pycryptodome
 Cryptodome:pycryptodomex
 FSM:pexpect
@@ -36,7 +35,6 @@ Pyxides:astro_pyxis
 QtCore:PySide
 S3:s3cmd
 SCons:pystick
-speech_recognition:SpeechRecognition
 Shared:Zope2
 Signals:Zope2
 Stemmer:PyStemmer
@@ -131,7 +129,6 @@ aios3:aio_s3
 airbrake:airbrake_flask
 airship:airship_icloud
 airship:airship_steamcloud
-airflow:apache-airflow
 akamai:edgegrid_python
 alation:alation_api
 alba_client:alba_client_python
@@ -583,7 +580,6 @@ ctff:tff
 cups:pycups
 curator:elasticsearch_curator
 curl:pycurl
-cv2:opencv-python
 daemon:python_daemon
 dare:DARE
 dateutil:python_dateutil
@@ -722,7 +718,6 @@ jaraco:jaraco.util
 jinja2:Jinja2
 jiracli:jira_cli
 johnny:johnny_cache
-jose:python_jose
 jpgrid:python_geohash
 jpiarea:python_geohash
 jpype:JPype1
@@ -978,7 +973,6 @@ pysynth_samp:PySynth
 pythongettext:python_gettext
 pythonjsonlogger:python_json_logger
 pyutilib:PyUtilib
-pywintypes:pywin32
 pyximport:Cython
 qs:qserve
 quadtree:python_geohash
@@ -1036,10 +1030,9 @@ skbio:scikit_bio
 sklearn:scikit_learn
 slack:slackclient
 slugify:unicode_slugify
-slugify:python-slugify
 smarkets:smk_python_sdk
 snappy:ctypes_snappy
-socketio:python-socketio
+socketio:gevent_socketio
 socketserver:pies2overrides
 sockjs:sockjs_tornado
 socks:SocksiPy_branch
@@ -1068,7 +1061,6 @@ tasksitter:cerebrod
 tastypie:django_tastypie
 teamcity:teamcity_messages
 telebot:pyTelegramBotAPI
-telegram:python-telegram-bot
 tempita:Tempita
 tenjin:Tenjin
 termstyle:python_termstyle

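The mapping hunks above edit a plain-text table of import name to PyPI distribution name, one colon-separated pair per line (an import name may appear more than once, e.g. the two slugify entries on the master side). A minimal sketch of how such a file can be read (standalone Python; load_mapping is an illustrative name, not the pipreqs API, which exposes this lookup through get_pkg_names):

    def load_mapping(path="pipreqs/mapping"):
        # Each non-empty line is "<import name>:<PyPI distribution>";
        # with a plain dict, a later duplicate key wins.
        mapping = {}
        with open(path) as f:
            for line in f:
                line = line.strip()
                if line:
                    import_name, _, package = line.partition(":")
                    mapping[import_name] = package
        return mapping

    # e.g. load_mapping().get("cv2") -> "opencv-python" on the master side
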
pipreqs/pipreqs.py

@@ -20,7 +20,6 @@ Options:
     $ export HTTPS_PROXY="https://10.10.1.10:1080"
     --debug Print debug information
     --ignore <dirs>... Ignore extra directories, each separated by a comma
-    --ignore-errors Ignore errors while scanning files
     --no-follow-links Do not follow symbolic links in the project
     --encoding <charset> Use encoding parameter for file open
     --savepath <file> Save the list of requirements in the given file
@@ -32,11 +31,10 @@ Options:
     --clean <file> Clean up requirements.txt by removing modules
     that are not imported in project
     --mode <scheme> Enables dynamic versioning with <compat>,
-    <gt> or <no-pin> schemes.
+    <gt> or <non-pin> schemes.
     <compat> | e.g. Flask~=1.1.2
     <gt> | e.g. Flask>=1.1.2
     <no-pin> | e.g. Flask
-    --scan-notebooks Look for imports in jupyter notebook files.
 """
 from contextlib import contextmanager
 import os
@@ -52,23 +50,14 @@ from yarg.exceptions import HTTPError
 from pipreqs import __version__

-REGEXP = [re.compile(r"^import (.+)$"), re.compile(r"^from ((?!\.+).*?) import (?:.*)$")]
-DEFAULT_EXTENSIONS = [".py", ".pyw"]
-
-scan_noteboooks = False
-
-
-class NbconvertNotInstalled(ImportError):
-    default_message = (
-        "In order to scan jupyter notebooks, please install the nbconvert and ipython libraries"
-    )
-
-    def __init__(self, message=default_message):
-        super().__init__(message)
+REGEXP = [
+    re.compile(r'^import (.+)$'),
+    re.compile(r'^from ((?!\.+).*?) import (?:.*)$')
+]


 @contextmanager
-def _open(filename=None, mode="r"):
+def _open(filename=None, mode='r'):
     """Open a file or ``sys.stdout`` depending on the provided filename.

     Args:
@@ -81,13 +70,13 @@ def _open(filename=None, mode="r"):
         A file handle.

     """
-    if not filename or filename == "-":
-        if not mode or "r" in mode:
+    if not filename or filename == '-':
+        if not mode or 'r' in mode:
             file = sys.stdin
-        elif "w" in mode:
+        elif 'w' in mode:
             file = sys.stdout
         else:
-            raise ValueError("Invalid mode for file: {}".format(mode))
+            raise ValueError('Invalid mode for file: {}'.format(mode))
     else:
         file = open(filename, mode)

@@ -98,21 +87,13 @@ def _open(filename=None, mode="r"):
         file.close()


-def get_all_imports(path, encoding="utf-8", extra_ignore_dirs=None, follow_links=True, ignore_errors=False):
+def get_all_imports(
+        path, encoding=None, extra_ignore_dirs=None, follow_links=True):
     imports = set()
     raw_imports = set()
     candidates = []
-    ignore_dirs = [
-        ".hg",
-        ".svn",
-        ".git",
-        ".tox",
-        "__pycache__",
-        "env",
-        "venv",
-        ".venv",
-        ".ipynb_checkpoints",
-    ]
+    ignore_errors = False
+    ignore_dirs = [".hg", ".svn", ".git", ".tox", "__pycache__", "env", "venv"]

     if extra_ignore_dirs:
         ignore_dirs_parsed = []
@@ -120,23 +101,19 @@ def get_all_imports(path, encoding="utf-8", extra_ignore_dirs=None, follow_links
             ignore_dirs_parsed.append(os.path.basename(os.path.realpath(e)))
         ignore_dirs.extend(ignore_dirs_parsed)

-    extensions = get_file_extensions()
-
     walk = os.walk(path, followlinks=follow_links)
     for root, dirs, files in walk:
         dirs[:] = [d for d in dirs if d not in ignore_dirs]

         candidates.append(os.path.basename(root))
-        py_files = [file for file in files if file_ext_is_allowed(file, DEFAULT_EXTENSIONS)]
-        candidates.extend([os.path.splitext(filename)[0] for filename in py_files])
-
-        files = [fn for fn in files if file_ext_is_allowed(fn, extensions)]
+        files = [fn for fn in files if os.path.splitext(fn)[1] == ".py"]
+        candidates += [os.path.splitext(fn)[0] for fn in files]

         for file_name in files:
             file_name = os.path.join(root, file_name)
+            with open(file_name, "r", encoding=encoding) as f:
+                contents = f.read()
             try:
-                contents = read_file_content(file_name, encoding)
                 tree = ast.parse(contents)
                 for node in ast.walk(tree):
                     if isinstance(node, ast.Import):
@@ -146,7 +123,7 @@ def get_all_imports(path, encoding="utf-8", extra_ignore_dirs=None, follow_links
                         raw_imports.add(node.module)
             except Exception as exc:
                 if ignore_errors:
-                    traceback.print_exc()
+                    traceback.print_exc(exc)
                     logging.warn("Failed on file: %s" % file_name)
                     continue
                 else:
@@ -160,11 +137,11 @@ def get_all_imports(path, encoding="utf-8", extra_ignore_dirs=None, follow_links
         # Cleanup: We only want to first part of the import.
         # Ex: from django.conf --> django.conf. But we only want django
         # as an import.
-        cleaned_name, _, _ = name.partition(".")
+        cleaned_name, _, _ = name.partition('.')
         imports.add(cleaned_name)

     packages = imports - (set(candidates) & imports)
-    logging.debug("Found packages: {0}".format(packages))
+    logging.debug('Found packages: {0}'.format(packages))

     with open(join("stdlib"), "r") as f:
         data = {x.strip() for x in f}
@@ -172,96 +149,53 @@ def get_all_imports(path, encoding="utf-8", extra_ignore_dirs=None, follow_links
     return list(packages - data)


-def get_file_extensions():
-    return DEFAULT_EXTENSIONS + [".ipynb"] if scan_noteboooks else DEFAULT_EXTENSIONS
-
-
-def read_file_content(file_name: str, encoding="utf-8"):
-    if file_ext_is_allowed(file_name, DEFAULT_EXTENSIONS):
-        with open(file_name, "r", encoding=encoding) as f:
-            contents = f.read()
-    elif file_ext_is_allowed(file_name, [".ipynb"]) and scan_noteboooks:
-        contents = ipynb_2_py(file_name, encoding=encoding)
-    return contents
-
-
-def file_ext_is_allowed(file_name, acceptable):
-    return os.path.splitext(file_name)[1] in acceptable
-
-
-def ipynb_2_py(file_name, encoding="utf-8"):
-    """
-    Args:
-        file_name (str): notebook file path to parse as python script
-        encoding (str): encoding of file
-
-    Returns:
-        str: parsed string
-    """
-    exporter = PythonExporter()
-    (body, _) = exporter.from_filename(file_name)
-
-    return body.encode(encoding)
+def filter_line(line):
+    return len(line) > 0 and line[0] != "#"


 def generate_requirements_file(path, imports, symbol):
     with _open(path, "w") as out_file:
-        logging.debug(
-            "Writing {num} requirements: {imports} to {file}".format(
-                num=len(imports), file=path, imports=", ".join([x["name"] for x in imports])
-            )
-        )
-        fmt = "{name}" + symbol + "{version}"
-        out_file.write(
-            "\n".join(
-                fmt.format(**item) if item["version"] else "{name}".format(**item)
-                for item in imports
-            )
-            + "\n"
-        )
+        logging.debug('Writing {num} requirements: {imports} to {file}'.format(
+            num=len(imports),
+            file=path,
+            imports=", ".join([x['name'] for x in imports])
+        ))
+        fmt = '{name}' + symbol + '{version}'
+        out_file.write('\n'.join(
+            fmt.format(**item) if item['version'] else '{name}'.format(**item)
+            for item in imports) + '\n')


 def output_requirements(imports, symbol):
-    generate_requirements_file("-", imports, symbol)
+    generate_requirements_file('-', imports, symbol)


-def get_imports_info(imports, pypi_server="https://pypi.python.org/pypi/", proxy=None):
+def get_imports_info(
+        imports, pypi_server="https://pypi.python.org/pypi/", proxy=None):
     result = []

     for item in imports:
         try:
-            logging.warning(
-                'Import named "%s" not found locally. ' "Trying to resolve it at the PyPI server.",
-                item,
-            )
-            response = requests.get("{0}{1}/json".format(pypi_server, item), proxies=proxy)
+            response = requests.get(
+                "{0}{1}/json".format(pypi_server, item), proxies=proxy)
             if response.status_code == 200:
-                if hasattr(response.content, "decode"):
+                if hasattr(response.content, 'decode'):
                     data = json2package(response.content.decode())
                 else:
                     data = json2package(response.content)
             elif response.status_code >= 300:
-                raise HTTPError(status_code=response.status_code, reason=response.reason)
+                raise HTTPError(status_code=response.status_code,
+                                reason=response.reason)
         except HTTPError:
-            logging.warning('Package "%s" does not exist or network problems', item)
+            logging.debug(
+                'Package %s does not exist or network problems', item)
             continue
-        logging.warning(
-            'Import named "%s" was resolved to "%s:%s" package (%s).\n'
-            "Please, verify manually the final list of requirements.txt "
-            "to avoid possible dependency confusions.",
-            item,
-            data.name,
-            data.latest_release_id,
-            data.pypi_url,
-        )
-        result.append({"name": item, "version": data.latest_release_id})
+        result.append({'name': item, 'version': data.latest_release_id})
     return result


-def get_locally_installed_packages(encoding="utf-8"):
-    packages = []
+def get_locally_installed_packages(encoding=None):
+    packages = {}
     ignore = ["tests", "_tests", "egg", "EGG", "info"]
     for path in sys.path:
         for root, dirs, files in os.walk(path):
@@ -271,53 +205,39 @@ def get_locally_installed_packages(encoding="utf-8"):
                 with open(item, "r", encoding=encoding) as f:
                     package = root.split(os.sep)[-1].split("-")
                     try:
-                        top_level_modules = f.read().strip().split("\n")
+                        package_import = f.read().strip().split("\n")
                     except:  # NOQA
                         # TODO: What errors do we intend to suppress here?
                         continue
-
-                    # filter off explicitly ignored top-level modules
-                    # such as test, egg, etc.
-                    filtered_top_level_modules = list()
-
-                    for module in top_level_modules:
-                        if (module not in ignore) and (package[0] not in ignore):
-                            # append exported top level modules to the list
-                            filtered_top_level_modules.append(module)
-
-                    version = None
-                    if len(package) > 1:
-                        version = package[1].replace(".dist", "").replace(".egg", "")
-
-                    # append package: top_level_modules pairs
-                    # instead of top_level_module: package pairs
-                    packages.append(
-                        {
-                            "name": package[0],
-                            "version": version,
-                            "exports": filtered_top_level_modules,
-                        }
-                    )
+                    for i_item in package_import:
+                        if ((i_item not in ignore) and
+                                (package[0] not in ignore)):
+                            version = None
+                            if len(package) > 1:
+                                version = package[1].replace(
+                                    ".dist", "").replace(".egg", "")
+                            packages[i_item] = {
+                                'version': version,
+                                'name': package[0]
+                            }
     return packages


-def get_import_local(imports, encoding="utf-8"):
+def get_import_local(imports, encoding=None):
     local = get_locally_installed_packages()
     result = []
     for item in imports:
-        # search through local packages
-        for package in local:
-            # if candidate import name matches export name
-            # or candidate import name equals to the package name
-            # append it to the result
-            if item in package["exports"] or item == package["name"]:
-                result.append(package)
+        if item.lower() in local:
+            result.append(local[item.lower()])

     # removing duplicates of package/version
-    # had to use second method instead of the previous one,
-    # because we have a list in the 'exports' field
-    # https://stackoverflow.com/questions/9427163/remove-duplicate-dict-in-list-in-python
-    result_unique = [i for n, i in enumerate(result) if i not in result[n + 1:]]
+    result_unique = [
+        dict(t)
+        for t in set([
+            tuple(d.items()) for d in result
+        ])
+    ]

     return result_unique

@@ -348,7 +268,7 @@ def get_name_without_alias(name):
         match = REGEXP[0].match(name.strip())
         if match:
             name = match.groups(0)[0]
-    return name.partition(" as ")[0].partition(".")[0].strip()
+    return name.partition(' as ')[0].partition('.')[0].strip()


 def join(f):
@@ -362,9 +282,6 @@ def parse_requirements(file_):
     delimiter, get module name by element index, create a dict consisting of
     module:version, and add dict to list of parsed modules.

-    If file ´file_´ is not found in the system, the program will print a
-    helpful message and end its execution immediately.
-
     Args:
         file_: File to parse.

@@ -372,7 +289,7 @@ def parse_requirements(file_):
         OSerror: If there's any issues accessing the file.

     Returns:
-        list: The contents of the file, excluding comments.
+        tuple: The contents of the file, excluding comments.
     """
     modules = []
     # For the dependency identifier specification, see
@@ -381,12 +298,9 @@ def parse_requirements(file_):

     try:
         f = open(file_, "r")
-    except FileNotFoundError:
-        print(f"File {file_} was not found. Please, fix it and run again.")
-        sys.exit(1)
-    except OSError as error:
-        logging.error(f"There was an error opening the file {file_}: {str(error)}")
-        raise error
+    except OSError:
+        logging.error("Failed on file: {}".format(file_))
+        raise
     else:
         try:
             data = [x.strip() for x in f.readlines() if x != "\n"]
@@ -422,8 +336,8 @@ def compare_modules(file_, imports):
         imports (tuple): Modules being imported in the project.

     Returns:
-        set: The modules not imported in the project, but do exist in the
+        tuple: The modules not imported in the project, but do exist in the
             specified file.
     """
     modules = parse_requirements(file_)
@@ -440,8 +354,7 @@ def diff(file_, imports):
     logging.info(
         "The following modules are in {} but do not seem to be imported: "
-        "{}".format(file_, ", ".join(x for x in modules_not_imported))
-    )
+        "{}".format(file_, ", ".join(x for x in modules_not_imported)))


 def clean(file_, imports):
@@ -488,57 +401,31 @@ def dynamic_versioning(scheme, imports):
     return imports, symbol


-def handle_scan_noteboooks():
-    if not scan_noteboooks:
-        logging.info("Not scanning for jupyter notebooks.")
-        return
-
-    try:
-        global PythonExporter
-        from nbconvert import PythonExporter
-    except ImportError:
-        raise NbconvertNotInstalled()
-
-
 def init(args):
-    global scan_noteboooks
-    encoding = args.get("--encoding")
-    extra_ignore_dirs = args.get("--ignore")
-    follow_links = not args.get("--no-follow-links")
-    ignore_errors = args.get("--ignore-errors")
-    scan_noteboooks = args.get("--scan-notebooks", False)
-    handle_scan_noteboooks()
-
-    input_path = args["<path>"]
-
-    if encoding is None:
-        encoding = "utf-8"
+    encoding = args.get('--encoding')
+    extra_ignore_dirs = args.get('--ignore')
+    follow_links = not args.get('--no-follow-links')
+    input_path = args['<path>']

     if input_path is None:
         input_path = os.path.abspath(os.curdir)

     if extra_ignore_dirs:
-        extra_ignore_dirs = extra_ignore_dirs.split(",")
+        extra_ignore_dirs = extra_ignore_dirs.split(',')

-    path = (
-        args["--savepath"] if args["--savepath"] else os.path.join(input_path, "requirements.txt")
-    )
-    if (
-        not args["--print"]
-        and not args["--savepath"]
-        and not args["--force"]
-        and os.path.exists(path)
-    ):
-        logging.warning("requirements.txt already exists, " "use --force to overwrite it")
+    path = (args["--savepath"] if args["--savepath"] else
+            os.path.join(input_path, "requirements.txt"))
+    if (not args["--print"]
+            and not args["--savepath"]
+            and not args["--force"]
+            and os.path.exists(path)):
+        logging.warning("requirements.txt already exists, "
+                        "use --force to overwrite it")
         return

-    candidates = get_all_imports(
-        input_path,
-        encoding=encoding,
-        extra_ignore_dirs=extra_ignore_dirs,
-        follow_links=follow_links,
-        ignore_errors=ignore_errors,
-    )
+    candidates = get_all_imports(input_path,
+                                 encoding=encoding,
+                                 extra_ignore_dirs=extra_ignore_dirs,
+                                 follow_links=follow_links)
     candidates = get_pkg_names(candidates)
     logging.debug("Found imports: " + ", ".join(candidates))
     pypi_server = "https://pypi.python.org/pypi/"
@@ -547,34 +434,23 @@ def init(args):
         pypi_server = args["--pypi-server"]

     if args["--proxy"]:
-        proxy = {"http": args["--proxy"], "https": args["--proxy"]}
+        proxy = {'http': args["--proxy"], 'https': args["--proxy"]}

     if args["--use-local"]:
-        logging.debug("Getting package information ONLY from local installation.")
+        logging.debug(
+            "Getting package information ONLY from local installation.")
         imports = get_import_local(candidates, encoding=encoding)
     else:
         logging.debug("Getting packages information from Local/PyPI")
         local = get_import_local(candidates, encoding=encoding)
-
-        # check if candidate name is found in
-        # the list of exported modules, installed locally
-        # and the package name is not in the list of local module names
-        # it add to difference
-        difference = [
-            x
-            for x in candidates
-            if
-            # aggregate all export lists into one
-            # flatten the list
-            # check if candidate is in exports
-            x.lower() not in [y for x in local for y in x["exports"]] and
-            # check if candidate is package names
-            x.lower() not in [x["name"] for x in local]
-        ]
-
-        imports = local + get_imports_info(difference, proxy=proxy, pypi_server=pypi_server)
+        # Get packages that were not found locally
+        difference = [x for x in candidates
+                      if x.lower() not in [z['name'].lower() for z in local]]
+        imports = local + get_imports_info(difference,
+                                           proxy=proxy,
+                                           pypi_server=pypi_server)
     # sort imports based on lowercase name of package, similar to `pip freeze`.
-    imports = sorted(imports, key=lambda x: x["name"].lower())
+    imports = sorted(imports, key=lambda x: x['name'].lower())

     if args["--diff"]:
         diff(args["--diff"], imports)
@@ -589,9 +465,8 @@ def init(args):
         if scheme in ["compat", "gt", "no-pin"]:
             imports, symbol = dynamic_versioning(scheme, imports)
         else:
-            raise ValueError(
-                "Invalid argument for mode flag, " "use 'compat', 'gt' or 'no-pin' instead"
-            )
+            raise ValueError("Invalid argument for mode flag, "
+                             "use 'compat', 'gt' or 'no-pin' instead")
     else:
         symbol = "=="

@@ -605,8 +480,8 @@ def init(args):
 def main():  # pragma: no cover
     args = docopt(__doc__, version=__version__)
-    log_level = logging.DEBUG if args["--debug"] else logging.INFO
-    logging.basicConfig(level=log_level, format="%(levelname)s: %(message)s")
+    log_level = logging.DEBUG if args['--debug'] else logging.INFO
+    logging.basicConfig(level=log_level, format='%(levelname)s: %(message)s')

     try:
         init(args)
@@ -614,5 +489,5 @@ def main():  # pragma: no cover
         sys.exit(0)


-if __name__ == "__main__":
+if __name__ == '__main__':
     main()  # pragma: no cover

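For orientation after this long file: both sides of the diff keep the same top-level pipeline that init() wires together — collect imports, map them to package names, resolve versions, emit requirements. A minimal usage sketch against the v0.4.12 signatures shown above (the project path and extra_ignore_dirs value are illustrative):

    from pipreqs import pipreqs

    # 1. Walk the tree and collect top-level import names, skipping the
    #    built-in ignore_dirs (.git, .tox, venv, ...) plus any extras.
    imports = pipreqs.get_all_imports("./my_project", extra_ignore_dirs=["build"])

    # 2. Translate import names to PyPI package names via the mapping file.
    packages = pipreqs.get_pkg_names(imports)

    # 3. Resolve latest versions against PyPI, then print "name==version"
    #    lines to stdout (generate_requirements_file writes to a path instead).
    info = pipreqs.get_imports_info(packages)
    pipreqs.output_requirements(info, symbol="==")
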
poetry.lock (generated)

File diff suppressed because it is too large.

poetry.toml (deleted)

@@ -1,2 +0,0 @@
[virtualenvs]
prefer-active-python = true

pyproject.toml (deleted)

@@ -1,53 +0,0 @@
[project]
name = "pipreqs"
version = "0.5.0"
description = "Pip requirements.txt generator based on imports in project"
authors = [
{ name = "Vadim Kravcenko", email = "vadim.kravcenko@gmail.com" }
]
maintainers = [
{name = "Jonas Eschle", email = "jonas.eschle@gmail.com"}
]
license = "Apache-2.0"
readme = "README.rst"
packages = [{ include = "pipreqs" }]
repository = "https://github.com/bndr/pipreqs"
keywords = ["pip", "requirements", "imports"]
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
]
requires-python = ">=3.9, <3.14"
dependencies = [
"yarg>=0.1.9",
"docopt>=0.6.2",
"nbconvert>=7.11.0",
"ipython>=8.12.3",
]
[project.optional-dependencies]
dev = [
"flake8>=6.1.0",
"tox>=4.11.3",
"coverage>=7.3.2",
"sphinx>=7.2.6;python_version>='3.9'",
]
[tool.poetry.group.dev.dependencies] # for legacy usage
flake8 = "^6.1.0"
tox = "^4.11.3"
coverage = "^7.3.2"
sphinx = { version = "^7.2.6", python = ">=3.9" }
[project.scripts]
pipreqs = "pipreqs.pipreqs:main"
[build-system]
requires = ["poetry-core>=2.0.0,<3.0.0"]
build-backend = "poetry.core.masonry.api"

requirements.txt (new file)

@@ -0,0 +1,3 @@
wheel==0.23.0
Yarg==0.1.9
docopt==0.6.2

setup.cfg (new file)

@@ -0,0 +1,2 @@
[wheel]
universal = 1

setup.py (new file, executable)

@@ -0,0 +1,59 @@
#!/usr/bin/env python

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

from pipreqs import __version__

with open('README.rst') as readme_file:
    readme = readme_file.read()

with open('HISTORY.rst') as history_file:
    history = history_file.read().replace('.. :changelog:', '')

requirements = [
    'docopt', 'yarg'
]

setup(
    name='pipreqs',
    version=__version__,
    description='Pip requirements.txt generator based on imports in project',
    long_description=readme + '\n\n' + history,
    author='Vadim Kravcenko',
    author_email='vadim.kravcenko@gmail.com',
    url='https://github.com/bndr/pipreqs',
    packages=[
        'pipreqs',
    ],
    package_dir={'pipreqs':
                 'pipreqs'},
    include_package_data=True,
    package_data={'': ['stdlib', 'mapping']},
    install_requires=requirements,
    license='Apache License',
    zip_safe=False,
    keywords='pip requirements imports',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Programming Language :: Python :: 3.11',
    ],
    test_suite='tests',
    entry_points={
        'console_scripts': [
            'pipreqs=pipreqs.pipreqs:main',
        ],
    },
    python_requires='>=3.7',
)

tests/_data/imports.txt (deleted)

@@ -1,3 +0,0 @@
pandas==2.0.0
numpy>=1.2.3
torch<4.0.0

tests/_data/imports_any_version.txt (deleted)

@@ -1,4 +0,0 @@
numpy
pandas==2.0.0
tensorflow
torch<4.0.0

tests/_data/imports_no_version.txt (deleted)

@@ -1,3 +0,0 @@
pandas
tensorflow
torch

(unnamed notebook fixture, deleted)

@@ -1,65 +0,0 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Magic test"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%automagic true"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ls -la\n",
"logstate"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ls -la"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%automagic false"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ls -la"
]
}
],
"metadata": {
"language_info": {
"name": "python"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

(unnamed notebook fixture, deleted)

@@ -1,37 +0,0 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Markdown test\n",
"import sklearn\n",
"\n",
"```python\n",
"import FastAPI\n",
"```"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.1"
}
},
"nbformat": 4,
"nbformat_minor": 4
}

tests/_data_notebook/test.ipynb (deleted)

@@ -1,102 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\"\"\"unused import\"\"\"\n",
"# pylint: disable=undefined-all-variable, import-error, no-absolute-import, too-few-public-methods, missing-docstring\n",
"import xml.etree # [unused-import]\n",
"import xml.sax # [unused-import]\n",
"import os.path as test # [unused-import]\n",
"from sys import argv as test2 # [unused-import]\n",
"from sys import flags # [unused-import]\n",
"# +1:[unused-import,unused-import]\n",
"from collections import deque, OrderedDict, Counter\n",
"# All imports above should be ignored\n",
"import requests # [unused-import]\n",
"\n",
"# setuptools\n",
"import zipimport # command/easy_install.py\n",
"\n",
"# twisted\n",
"from importlib import invalidate_caches # python/test/test_deprecate.py\n",
"\n",
"# astroid\n",
"import zipimport # manager.py\n",
"# IPython\n",
"from importlib.machinery import all_suffixes # core/completerlib.py\n",
"import importlib # html/notebookapp.py\n",
"\n",
"from IPython.utils.importstring import import_item # Many files\n",
"\n",
"# pyflakes\n",
"# test/test_doctests.py\n",
"from pyflakes.test.test_imports import Test as TestImports\n",
"\n",
"# Nose\n",
"from nose.importer import Importer, add_path, remove_path # loader.py\n",
"\n",
"import atexit\n",
"from __future__ import print_function\n",
"from docopt import docopt\n",
"import curses, logging, sqlite3\n",
"import logging\n",
"import os\n",
"import sqlite3\n",
"import time\n",
"import sys\n",
"import signal\n",
"import bs4\n",
"import nonexistendmodule\n",
"import boto as b, peewee as p\n",
"# import django\n",
"import flask.ext.somext # # #\n",
"from sqlalchemy import model"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"try:\n",
" import ujson as json\n",
"except ImportError:\n",
" import json\n",
"\n",
"import models\n",
"\n",
"\n",
"def main():\n",
" pass\n",
"\n",
"import after_method_is_valid_even_if_not_pep8"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.1"
}
},
"nbformat": 4,
"nbformat_minor": 4
}

(unnamed test fixture, deleted)

@@ -1,5 +0,0 @@
import airflow
import numpy
airflow
numpy

(unnamed test fixture, deleted)

@@ -1,3 +0,0 @@
import matplotlib
import pandas
import tensorflow

(unnamed notebook fixture, deleted)

@@ -1,34 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cd ."
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.4"
}
},
"nbformat": 4,
"nbformat_minor": 4
}

View File

@ -8,93 +8,53 @@ test_pipreqs
Tests for `pipreqs` module. Tests for `pipreqs` module.
""" """
from io import StringIO
import logging
from unittest.mock import patch, Mock
import unittest import unittest
import os import os
import requests import requests
import sys
import warnings
from pipreqs import pipreqs from pipreqs import pipreqs
class TestPipreqs(unittest.TestCase): class TestPipreqs(unittest.TestCase):
@classmethod def setUp(self):
def setUpClass(cls): self.modules = [
# Disable all logs for not spamming the terminal when running tests. 'flask', 'requests', 'sqlalchemy', 'docopt', 'boto', 'ipython',
logging.disable(logging.CRITICAL) 'pyflakes', 'nose', 'analytics', 'flask_seasurf', 'peewee',
'ujson', 'nonexistendmodule', 'bs4',
# Specific warning not covered by the above command: 'after_method_is_valid_even_if_not_pep8'
warnings.filterwarnings("ignore", category=DeprecationWarning, module="jupyter_client") ]
self.modules2 = ['beautifulsoup4']
cls.modules = [ self.local = ["docopt", "requests", "nose", 'pyflakes']
"flask", self.project = os.path.join(os.path.dirname(__file__), "_data")
"requests", self.project_clean = os.path.join(
"sqlalchemy", os.path.dirname(__file__),
"docopt", "_data_clean"
"boto", )
"ipython", self.project_invalid = os.path.join(
"pyflakes", os.path.dirname(__file__),
"nose", "_invalid_data"
"analytics", )
"flask_seasurf", self.project_with_ignore_directory = os.path.join(
"peewee", os.path.dirname(__file__),
"ujson", "_data_ignore"
"nonexistendmodule", )
"bs4", self.project_with_duplicated_deps = os.path.join(
"after_method_is_valid_even_if_not_pep8", os.path.dirname(__file__),
] "_data_duplicated_deps"
cls.modules2 = ["beautifulsoup4"] )
cls.local = ["docopt", "requests", "nose", "pyflakes", "ipython"] self.requirements_path = os.path.join(self.project, "requirements.txt")
cls.project = os.path.join(os.path.dirname(__file__), "_data") self.alt_requirement_path = os.path.join(
cls.empty_filepath = os.path.join(cls.project, "empty.txt") self.project,
cls.imports_filepath = os.path.join(cls.project, "imports.txt") "requirements2.txt"
cls.imports_no_version_filepath = os.path.join(cls.project, "imports_no_version.txt") )
cls.imports_any_version_filepath = os.path.join(cls.project, "imports_any_version.txt")
cls.non_existent_filepath = os.path.join(cls.project, "non_existent_file.txt")
cls.parsed_packages = [
{"name": "pandas", "version": "2.0.0"},
{"name": "numpy", "version": "1.2.3"},
{"name": "torch", "version": "4.0.0"},
]
cls.parsed_packages_no_version = [
{"name": "pandas", "version": None},
{"name": "tensorflow", "version": None},
{"name": "torch", "version": None},
]
cls.parsed_packages_any_version = [
{"name": "numpy", "version": None},
{"name": "pandas", "version": "2.0.0"},
{"name": "tensorflow", "version": None},
{"name": "torch", "version": "4.0.0"},
]
cls.project_clean = os.path.join(os.path.dirname(__file__), "_data_clean")
cls.project_invalid = os.path.join(os.path.dirname(__file__), "_invalid_data")
cls.project_with_ignore_directory = os.path.join(os.path.dirname(__file__), "_data_ignore")
cls.project_with_duplicated_deps = os.path.join(os.path.dirname(__file__), "_data_duplicated_deps")
cls.requirements_path = os.path.join(cls.project, "requirements.txt")
cls.alt_requirement_path = os.path.join(cls.project, "requirements2.txt")
cls.non_existing_filepath = "xpto"
cls.project_with_notebooks = os.path.join(os.path.dirname(__file__), "_data_notebook")
cls.project_with_invalid_notebooks = os.path.join(os.path.dirname(__file__), "_invalid_data_notebook")
cls.python_path_same_imports = os.path.join(os.path.dirname(__file__), "_data/test.py")
cls.notebook_path_same_imports = os.path.join(os.path.dirname(__file__), "_data_notebook/test.ipynb")
def test_get_all_imports(self): def test_get_all_imports(self):
imports = pipreqs.get_all_imports(self.project) imports = pipreqs.get_all_imports(self.project)
self.assertEqual(len(imports), 15) self.assertEqual(len(imports), 15)
for item in imports: for item in imports:
self.assertTrue(item.lower() in self.modules, "Import is missing: " + item) self.assertTrue(
item.lower() in self.modules, "Import is missing: " + item)
self.assertFalse("time" in imports) self.assertFalse("time" in imports)
self.assertFalse("logging" in imports) self.assertFalse("logging" in imports)
self.assertFalse("curses" in imports) self.assertFalse("curses" in imports)
@ -112,14 +72,8 @@ class TestPipreqs(unittest.TestCase):
""" """
Test that invalid python files cannot be imported. Test that invalid python files cannot be imported.
""" """
self.assertRaises(SyntaxError, pipreqs.get_all_imports, self.project_invalid) self.assertRaises(
SyntaxError, pipreqs.get_all_imports, self.project_invalid)
def test_ignore_errors(self):
"""
Test that invalid python files do not raise an exception when ignore_errors is True.
"""
imports = pipreqs.get_all_imports(self.project_invalid, ignore_errors=True)
self.assertEqual(len(imports), 0)
def test_get_imports_info(self): def test_get_imports_info(self):
""" """
@ -132,14 +86,13 @@ class TestPipreqs(unittest.TestCase):
self.assertEqual(len(with_info), 13) self.assertEqual(len(with_info), 13)
for item in with_info: for item in with_info:
self.assertTrue( self.assertTrue(
item["name"].lower() in self.modules, item['name'].lower() in self.modules,
"Import item appears to be missing " + item["name"], "Import item appears to be missing " + item['name'])
)
def test_get_pkg_names(self): def test_get_pkg_names(self):
pkgs = ["jury", "Japan", "camel", "Caroline"] pkgs = ['jury', 'Japan', 'camel', 'Caroline']
actual_output = pipreqs.get_pkg_names(pkgs) actual_output = pipreqs.get_pkg_names(pkgs)
expected_output = ["camel", "Caroline", "Japan", "jury"] expected_output = ['camel', 'Caroline', 'Japan', 'jury']
self.assertEqual(actual_output, expected_output) self.assertEqual(actual_output, expected_output)
def test_get_use_local_only(self): def test_get_use_local_only(self):
@ -154,33 +107,22 @@ class TestPipreqs(unittest.TestCase):
# should find only docopt and requests # should find only docopt and requests
imports_with_info = pipreqs.get_import_local(self.modules) imports_with_info = pipreqs.get_import_local(self.modules)
for item in imports_with_info: for item in imports_with_info:
self.assertTrue(item["name"].lower() in self.local) self.assertTrue(item['name'].lower() in self.local)
def test_init(self): def test_init(self):
""" """
Test that all modules we will test upon are in requirements file Test that all modules we will test upon are in requirements file
""" """
pipreqs.init( pipreqs.init({'<path>': self.project, '--savepath': None, '--print': False,
{ '--use-local': None, '--force': True, '--proxy':None, '--pypi-server':None,
"<path>": self.project, '--diff': None, '--clean': None, '--mode': None})
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
assert os.path.exists(self.requirements_path) == 1 assert os.path.exists(self.requirements_path) == 1
with open(self.requirements_path, "r") as f: with open(self.requirements_path, "r") as f:
data = f.read().lower() data = f.read().lower()
for item in self.modules[:-3]: for item in self.modules[:-3]:
self.assertTrue(item.lower() in data) self.assertTrue(item.lower() in data)
# It should be sorted based on names. # It should be sorted based on names.
data = data.strip().split("\n") data = data.strip().split('\n')
self.assertEqual(data, sorted(data)) self.assertEqual(data, sorted(data))
def test_init_local_only(self): def test_init_local_only(self):
@ -188,20 +130,9 @@ class TestPipreqs(unittest.TestCase):
Test that items listed in requirements.text are the same Test that items listed in requirements.text are the same
as locals expected as locals expected
""" """
pipreqs.init( pipreqs.init({'<path>': self.project, '--savepath': None, '--print': False,
{ '--use-local': True, '--force': True, '--proxy':None, '--pypi-server':None,
"<path>": self.project, '--diff': None, '--clean': None, '--mode': None})
"--savepath": None,
"--print": False,
"--use-local": True,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
assert os.path.exists(self.requirements_path) == 1 assert os.path.exists(self.requirements_path) == 1
with open(self.requirements_path, "r") as f: with open(self.requirements_path, "r") as f:
data = f.readlines() data = f.readlines()
@ -214,19 +145,9 @@ class TestPipreqs(unittest.TestCase):
Test that we can save requirements.txt correctly Test that we can save requirements.txt correctly
to a different path to a different path
""" """
pipreqs.init( pipreqs.init({'<path>': self.project, '--savepath': self.alt_requirement_path,
{ '--use-local': None, '--proxy':None, '--pypi-server':None, '--print': False,
"<path>": self.project, '--diff': None, '--clean': None, '--mode': None})
"--savepath": self.alt_requirement_path,
"--use-local": None,
"--proxy": None,
"--pypi-server": None,
"--print": False,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
assert os.path.exists(self.alt_requirement_path) == 1 assert os.path.exists(self.alt_requirement_path) == 1
with open(self.alt_requirement_path, "r") as f: with open(self.alt_requirement_path, "r") as f:
data = f.read().lower() data = f.read().lower()
@ -242,20 +163,9 @@ class TestPipreqs(unittest.TestCase):
""" """
with open(self.requirements_path, "w") as f: with open(self.requirements_path, "w") as f:
f.write("should_not_be_overwritten") f.write("should_not_be_overwritten")
pipreqs.init( pipreqs.init({'<path>': self.project, '--savepath': None, '--use-local': None,
{ '--force': None, '--proxy':None, '--pypi-server':None, '--print': False,
"<path>": self.project, '--diff': None, '--clean': None, '--mode': None})
"--savepath": None,
"--use-local": None,
"--force": None,
"--proxy": None,
"--pypi-server": None,
"--print": False,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
assert os.path.exists(self.requirements_path) == 1 assert os.path.exists(self.requirements_path) == 1
with open(self.requirements_path, "r") as f: with open(self.requirements_path, "r") as f:
data = f.read().lower() data = f.read().lower()
@ -270,49 +180,41 @@ class TestPipreqs(unittest.TestCase):
""" """
import_name_with_alias = "requests as R" import_name_with_alias = "requests as R"
expected_import_name_without_alias = "requests" expected_import_name_without_alias = "requests"
import_name_without_aliases = pipreqs.get_name_without_alias(import_name_with_alias) import_name_without_aliases = pipreqs.get_name_without_alias(
self.assertEqual(import_name_without_aliases, expected_import_name_without_alias) import_name_with_alias)
self.assertEqual(
import_name_without_aliases,
expected_import_name_without_alias
)
def test_custom_pypi_server(self): def test_custom_pypi_server(self):
""" """
Test that trying to get a custom pypi sever fails correctly Test that trying to get a custom pypi sever fails correctly
""" """
self.assertRaises( self.assertRaises(
requests.exceptions.MissingSchema, requests.exceptions.MissingSchema, pipreqs.init,
pipreqs.init, {'<path>': self.project, '--savepath': None, '--print': False,
{ '--use-local': None, '--force': True, '--proxy': None,
"<path>": self.project, '--pypi-server': 'nonexistent'}
"--savepath": None, )
"--print": False,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": "nonexistent",
},
)
def test_ignored_directory(self): def test_ignored_directory(self):
""" """
Test --ignore parameter Test --ignore parameter
""" """
pipreqs.init( pipreqs.init(
{ {'<path>': self.project_with_ignore_directory, '--savepath': None,
"<path>": self.project_with_ignore_directory, '--print': False, '--use-local': None, '--force': True,
"--savepath": None, '--proxy':None, '--pypi-server':None,
"--print": False, '--ignore':'.ignored_dir,.ignore_second',
"--use-local": None, '--diff': None,
"--force": True, '--clean': None,
"--proxy": None, '--mode': None
"--pypi-server": None, }
"--ignore": ".ignored_dir,.ignore_second",
"--diff": None,
"--clean": None,
"--mode": None,
}
) )
with open(os.path.join(self.project_with_ignore_directory, "requirements.txt"), "r") as f: with open(os.path.join(self.project_with_ignore_directory, "requirements.txt"), "r") as f:
data = f.read().lower() data = f.read().lower()
for item in ["click", "getpass"]: for item in ['click', 'getpass']:
self.assertFalse(item.lower() in data) self.assertFalse(item.lower() in data)
def test_dynamic_version_no_pin_scheme(self): def test_dynamic_version_no_pin_scheme(self):
@ -320,22 +222,17 @@ class TestPipreqs(unittest.TestCase):
         Test --mode=no-pin
         """
         pipreqs.init(
-            {
-                "<path>": self.project_with_ignore_directory,
-                "--savepath": None,
-                "--print": False,
-                "--use-local": None,
-                "--force": True,
-                "--proxy": None,
-                "--pypi-server": None,
-                "--diff": None,
-                "--clean": None,
-                "--mode": "no-pin",
-            }
+            {'<path>': self.project_with_ignore_directory, '--savepath': None,
+             '--print': False, '--use-local': None, '--force': True,
+             '--proxy': None, '--pypi-server': None,
+             '--diff': None,
+             '--clean': None,
+             '--mode': 'no-pin'
+             }
         )
         with open(os.path.join(self.project_with_ignore_directory, "requirements.txt"), "r") as f:
             data = f.read().lower()
-            for item in ["beautifulsoup4", "boto"]:
+            for item in ['beautifulsoup4', 'boto']:
                 self.assertTrue(item.lower() in data)

     def test_dynamic_version_gt_scheme(self):
@@ -343,24 +240,20 @@ class TestPipreqs(unittest.TestCase):
         Test --mode=gt
         """
         pipreqs.init(
-            {
-                "<path>": self.project_with_ignore_directory,
-                "--savepath": None,
-                "--print": False,
-                "--use-local": None,
-                "--force": True,
-                "--proxy": None,
-                "--pypi-server": None,
-                "--diff": None,
-                "--clean": None,
-                "--mode": "gt",
-            }
+            {'<path>': self.project_with_ignore_directory, '--savepath': None, '--print': False,
+             '--use-local': None, '--force': True,
+             '--proxy': None,
+             '--pypi-server': None,
+             '--diff': None,
+             '--clean': None,
+             '--mode': 'gt'
+             }
         )
         with open(os.path.join(self.project_with_ignore_directory, "requirements.txt"), "r") as f:
             data = f.readlines()
             for item in data:
-                symbol = ">="
-                message = "symbol is not in item"
+                symbol = '>='
+                message = 'symbol is not in item'
                 self.assertIn(symbol, item, message)

     def test_dynamic_version_compat_scheme(self):
@@ -368,24 +261,20 @@ class TestPipreqs(unittest.TestCase):
         Test --mode=compat
         """
         pipreqs.init(
-            {
-                "<path>": self.project_with_ignore_directory,
-                "--savepath": None,
-                "--print": False,
-                "--use-local": None,
-                "--force": True,
-                "--proxy": None,
-                "--pypi-server": None,
-                "--diff": None,
-                "--clean": None,
-                "--mode": "compat",
-            }
+            {'<path>': self.project_with_ignore_directory, '--savepath': None, '--print': False,
+             '--use-local': None, '--force': True,
+             '--proxy': None,
+             '--pypi-server': None,
+             '--diff': None,
+             '--clean': None,
+             '--mode': 'compat'
+             }
         )
         with open(os.path.join(self.project_with_ignore_directory, "requirements.txt"), "r") as f:
             data = f.readlines()
             for item in data:
-                symbol = "~="
-                message = "symbol is not in item"
+                symbol = '~='
+                message = 'symbol is not in item'
                 self.assertIn(symbol, item, message)
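The three --mode tests above differ only in how a discovered (name, version) pair is rendered into a requirements line. A hedged reconstruction of that scheme, not the library's actual code:

    def render_requirement(name, version, mode=None):
        # no-pin drops the version entirely; gt and compat relax the pin;
        # the default (mode None) pins exactly with "==".
        if mode == "no-pin":
            return name
        symbol = {"gt": ">=", "compat": "~="}.get(mode, "==")
        return f"{name}{symbol}{version}"

    assert render_requirement("boto", "2.49.0", "no-pin") == "boto"
    assert render_requirement("boto", "2.49.0", "gt") == "boto>=2.49.0"
    assert render_requirement("boto", "2.49.0", "compat") == "boto~=2.49.0"
    assert render_requirement("boto", "2.49.0") == "boto==2.49.0"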

     def test_clean(self):
@@ -393,34 +282,18 @@ class TestPipreqs(unittest.TestCase):
         Test --clean parameter
         """
         pipreqs.init(
-            {
-                "<path>": self.project,
-                "--savepath": None,
-                "--print": False,
-                "--use-local": None,
-                "--force": True,
-                "--proxy": None,
-                "--pypi-server": None,
-                "--diff": None,
-                "--clean": None,
-                "--mode": None,
-            }
+            {'<path>': self.project, '--savepath': None, '--print': False,
+             '--use-local': None, '--force': True, '--proxy': None,
+             '--pypi-server': None, '--diff': None, '--clean': None,
+             '--mode': None}
         )
         assert os.path.exists(self.requirements_path) == 1
         pipreqs.init(
-            {
-                "<path>": self.project,
-                "--savepath": None,
-                "--print": False,
-                "--use-local": None,
-                "--force": None,
-                "--proxy": None,
-                "--pypi-server": None,
-                "--diff": None,
-                "--clean": self.requirements_path,
-                "--mode": "non-pin",
-            }
+            {'<path>': self.project, '--savepath': None, '--print': False,
+             '--use-local': None, '--force': None, '--proxy': None,
+             '--pypi-server': None, '--diff': None,
+             '--clean': self.requirements_path, '--mode': 'non-pin'}
         )
         with open(self.requirements_path, "r") as f:
             data = f.read().lower()
             for item in self.modules[:-3]:
@@ -430,255 +303,25 @@ class TestPipreqs(unittest.TestCase):
""" """
Test --clean parameter when there are imports to clean Test --clean parameter when there are imports to clean
""" """
cleaned_module = "sqlalchemy" cleaned_module = 'sqlalchemy'
pipreqs.init( pipreqs.init(
{ {'<path>': self.project, '--savepath': None, '--print': False,
"<path>": self.project, '--use-local': None, '--force': True, '--proxy': None,
"--savepath": None, '--pypi-server': None, '--diff': None, '--clean': None,
"--print": False, '--mode': None}
"--use-local": None, )
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
assert os.path.exists(self.requirements_path) == 1 assert os.path.exists(self.requirements_path) == 1
modules_clean = [m for m in self.modules if m != cleaned_module]
pipreqs.init( pipreqs.init(
{ {'<path>': self.project_clean, '--savepath': None,
"<path>": self.project_clean, '--print': False, '--use-local': None, '--force': None,
"--savepath": None, '--proxy': None, '--pypi-server': None, '--diff': None,
"--print": False, '--clean': self.requirements_path, '--mode': 'non-pin'}
"--use-local": None, )
"--force": None,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": self.requirements_path,
"--mode": "non-pin",
}
)
with open(self.requirements_path, "r") as f: with open(self.requirements_path, "r") as f:
data = f.read().lower() data = f.read().lower()
self.assertTrue(cleaned_module not in data) self.assertTrue(cleaned_module not in data)
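Conceptually, --clean rewrites an existing requirements file so that only packages still imported by the scanned project survive; everything else (sqlalchemy in the test above) is dropped. A simplified, hedged model of that set logic, not pipreqs's implementation:

    # Keep only requirement lines whose package is still imported somewhere.
    existing = ["requests==2.31.0", "sqlalchemy==2.0.0"]
    still_imported = {"requests"}
    cleaned = [line for line in existing
               if line.split("==")[0] in still_imported]
    assert cleaned == ["requests==2.31.0"]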
-    def test_compare_modules(self):
-        test_cases = [
-            (self.empty_filepath, [], set()),  # both empty
-            (self.empty_filepath, self.parsed_packages, set()),  # only file empty
-            (
-                self.imports_filepath,
-                [],
-                set(package["name"] for package in self.parsed_packages),
-            ),  # only imports empty
-            (self.imports_filepath, self.parsed_packages, set()),  # no difference
-            (
-                self.imports_filepath,
-                self.parsed_packages[1:],
-                set([self.parsed_packages[0]["name"]]),
-            ),  # common case
-        ]
-
-        for test_case in test_cases:
-            with self.subTest(test_case):
-                filename, imports, expected_modules_not_imported = test_case
-                modules_not_imported = pipreqs.compare_modules(filename, imports)
-                self.assertSetEqual(modules_not_imported, expected_modules_not_imported)
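As the expectations above show, compare_modules(filename, imports) answers the question "which packages does the requirements file name that the import list lacks?". A hedged functional equivalent of that contract:

    def compare_modules_sketch(file_packages, imports):
        # Returns the file's package names that are absent from `imports`;
        # `imports` entries are dicts carrying at least a "name" key, as the
        # test's use of package["name"] implies.
        imported_names = {pkg["name"] for pkg in imports}
        return set(file_packages) - imported_names

    assert compare_modules_sketch(["requests", "boto"],
                                  [{"name": "boto"}]) == {"requests"}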
-    def test_output_requirements(self):
-        """
-        Test --print parameter
-        It should print to stdout the same content as requirements.txt
-        """
-        capturedOutput = StringIO()
-        sys.stdout = capturedOutput
-
-        pipreqs.init(
-            {
-                "<path>": self.project,
-                "--savepath": None,
-                "--print": True,
-                "--use-local": None,
-                "--force": None,
-                "--proxy": None,
-                "--pypi-server": None,
-                "--diff": None,
-                "--clean": None,
-                "--mode": None,
-            }
-        )
-        pipreqs.init(
-            {
-                "<path>": self.project,
-                "--savepath": None,
-                "--print": False,
-                "--use-local": None,
-                "--force": True,
-                "--proxy": None,
-                "--pypi-server": None,
-                "--diff": None,
-                "--clean": None,
-                "--mode": None,
-            }
-        )
-
-        with open(self.requirements_path, "r") as f:
-            file_content = f.read().lower()
-            stdout_content = capturedOutput.getvalue().lower()
-            self.assertTrue(file_content == stdout_content)
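The deleted test captures stdout by reassigning sys.stdout by hand, which leaks if init() raises. The same capture can be done self-revertingly with contextlib; a sketch of the idiom, not a change to the suite:

    import contextlib
    from io import StringIO

    buf = StringIO()
    with contextlib.redirect_stdout(buf):
        # Stand-in for pipreqs.init({..., "--print": True, ...}).
        print("requests==2.31.0")
    assert buf.getvalue().strip() == "requests==2.31.0"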
-    def test_import_notebooks(self):
-        """
-        Test the function get_all_imports() using .ipynb file
-        """
-        self.mock_scan_notebooks()
-        imports = pipreqs.get_all_imports(self.project_with_notebooks)
-        for item in imports:
-            self.assertTrue(item.lower() in self.modules, "Import is missing: " + item)
-        not_desired_imports = ["time", "logging", "curses", "__future__", "django", "models", "FastAPI", "sklearn"]
-        for not_desired_import in not_desired_imports:
-            self.assertFalse(
-                not_desired_import in imports,
-                f"{not_desired_import} was imported, but it should not have been."
-            )
-    def test_invalid_notebook(self):
-        """
-        Test that invalid notebook files cannot be imported.
-        """
-        self.mock_scan_notebooks()
-        self.assertRaises(SyntaxError, pipreqs.get_all_imports, self.project_with_invalid_notebooks)
-
-    def test_ipynb_2_py(self):
-        """
-        Test the function ipynb_2_py() which converts .ipynb file to .py format
-        """
-        python_imports = pipreqs.get_all_imports(self.python_path_same_imports)
-        notebook_imports = pipreqs.get_all_imports(self.notebook_path_same_imports)
-        self.assertEqual(python_imports, notebook_imports)
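A .ipynb file is JSON, so converting one to .py essentially means concatenating the source of its code cells, which is why the two get_all_imports() calls are expected to agree. A hedged sketch of that idea (not pipreqs's actual ipynb_2_py):

    import json

    def ipynb_to_py_sketch(path):
        # Keep only code cells; markdown cells and outputs carry no imports.
        with open(path, encoding="utf-8") as f:
            nb = json.load(f)
        return "\n".join("".join(cell["source"])
                         for cell in nb.get("cells", [])
                         if cell.get("cell_type") == "code")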
-    def test_file_ext_is_allowed(self):
-        """
-        Test the function file_ext_is_allowed()
-        """
-        self.assertTrue(pipreqs.file_ext_is_allowed("main.py", [".py"]))
-        self.assertTrue(pipreqs.file_ext_is_allowed("main.py", [".py", ".ipynb"]))
-        self.assertFalse(pipreqs.file_ext_is_allowed("main.py", [".ipynb"]))
-    def test_parse_requirements(self):
-        """
-        Test parse_requirements function
-        """
-        test_cases = [
-            (self.empty_filepath, []),  # empty file
-            (self.imports_filepath, self.parsed_packages),  # imports with versions
-            (
-                self.imports_no_version_filepath,
-                self.parsed_packages_no_version,
-            ),  # imports without versions
-            (
-                self.imports_any_version_filepath,
-                self.parsed_packages_any_version,
-            ),  # imports with and without versions
-        ]
-
-        for test in test_cases:
-            with self.subTest(test):
-                filename, expected_parsed_requirements = test
-                parsed_requirements = pipreqs.parse_requirements(filename)
-                self.assertListEqual(parsed_requirements, expected_parsed_requirements)
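Elsewhere in the file the parsed packages are indexed as package["name"], so parse_requirements() evidently returns a list of dicts rather than raw strings. The shape below is inferred from the tests; any key beyond "name" is an assumption:

    # Illustrative fixture in the inferred shape of parse_requirements output.
    parsed_packages = [
        {"name": "requests", "version": "2.31.0"},  # a pinned line
        {"name": "boto", "version": None},          # an unversioned line
    ]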
@patch("sys.exit")
def test_parse_requirements_handles_file_not_found(self, exit_mock):
captured_output = StringIO()
sys.stdout = captured_output
# This assertion is needed, because since "sys.exit" is mocked, the program won't end,
# and the code that is after the except block will be run
with self.assertRaises(UnboundLocalError):
pipreqs.parse_requirements(self.non_existing_filepath)
exit_mock.assert_called_once_with(1)
printed_text = captured_output.getvalue().strip()
sys.stdout = sys.__stdout__
self.assertEqual(printed_text, "File xpto was not found. Please, fix it and run again.")
-    def test_ignore_notebooks(self):
-        """
-        Test if notebooks are ignored when the scan-notebooks parameter is False
-        """
-        notebook_requirement_path = os.path.join(self.project_with_notebooks, "requirements.txt")
-
-        pipreqs.init(
-            {
-                "<path>": self.project_with_notebooks,
-                "--savepath": None,
-                "--use-local": None,
-                "--force": True,
-                "--proxy": None,
-                "--pypi-server": None,
-                "--print": False,
-                "--diff": None,
-                "--clean": None,
-                "--mode": None,
-                "--scan-notebooks": False,
-            }
-        )
-        assert os.path.exists(notebook_requirement_path) == 1
-        assert os.path.getsize(notebook_requirement_path) == 1  # file only has a "\n", meaning it's empty
-    def test_pipreqs_get_imports_from_pyw_file(self):
-        pyw_test_dirpath = os.path.join(os.path.dirname(__file__), "_data_pyw")
-        requirements_path = os.path.join(pyw_test_dirpath, "requirements.txt")
-
-        pipreqs.init(
-            {
-                "<path>": pyw_test_dirpath,
-                "--savepath": None,
-                "--print": False,
-                "--use-local": None,
-                "--force": True,
-                "--proxy": None,
-                "--pypi-server": None,
-                "--diff": None,
-                "--clean": None,
-                "--mode": None,
-            }
-        )
-
-        self.assertTrue(os.path.exists(requirements_path))
-
-        expected_imports = [
-            "airflow",
-            "matplotlib",
-            "numpy",
-            "pandas",
-            "tensorflow",
-        ]
-
-        with open(requirements_path, "r") as f:
-            imports_data = f.read().lower()
-
-            for _import in expected_imports:
-                self.assertTrue(
-                    _import.lower() in imports_data,
-                    f"'{_import}' import was expected but not found.",
-                )
-
-        os.remove(requirements_path)
-
-    def mock_scan_notebooks(self):
-        pipreqs.scan_noteboooks = Mock(return_value=True)
-        pipreqs.handle_scan_noteboooks()
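mock_scan_notebooks() monkeypatches a module-level attribute for the remainder of the test (the triple-o spelling of scan_noteboooks matches the name used by the module itself, as the helper shows). A tidier, self-reverting variant would use patch.object; a sketch, assuming the attribute exists at module level as the helper implies:

    from unittest.mock import Mock, patch
    from pipreqs import pipreqs

    with patch.object(pipreqs, "scan_noteboooks", Mock(return_value=True)):
        pipreqs.handle_scan_noteboooks()  # sees the mock only inside the block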
     def tearDown(self):
         """
         Remove requirements.txt files that were written
@@ -693,5 +336,5 @@ class TestPipreqs(unittest.TestCase):
             pass

-if __name__ == "__main__":
+if __name__ == '__main__':
     unittest.main()

tox.ini
View File

@@ -1,31 +1,16 @@
 [tox]
-isolated_build = true
-envlist = py39, py310, py311, py312, py313, pypy3, flake8
+envlist = py37, py38, py39, pypy3, flake8

 [gh-actions]
 python =
+    3.7: py37
+    3.8: py38
     3.9: py39
-    3.10: py310
-    3.11: py311
-    3.12: py312
-    3.13: py313
-    pypy-3.10: pypy3
+    pypy-3.7: pypy3

 [testenv]
 setenv =
     PYTHONPATH = {toxinidir}:{toxinidir}/pipreqs
-commands =
-    python -m unittest discover
+commands = python setup.py test
+deps =
+    -r{toxinidir}/requirements.txt
-
-[testenv:flake8]
-deps = flake8
-commands = flake8 pipreqs tests
-
-[flake8]
-exclude =
-    tests/_data/
-    tests/_data_clean/
-    tests/_data_duplicated_deps/
-    tests/_data_ignore/
-    tests/_invalid_data/
-max-line-length = 120
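A note on this file: the [gh-actions] table is what ties the CI matrix to tox. A job whose matrix python-version is 3.9 runs only the py39 environment, and a version missing from the table runs nothing at all, silently skipping tests, so the table must be kept in lockstep with the workflow matrix. Locally, a single environment can be exercised with `tox -e py39` (or `tox -e flake8` on the master side, where that environment is defined).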