Compare commits


No commits in common. "master" and "v0.4.3" have entirely different histories.

38 changed files with 1102 additions and 5285 deletions

.coveralls.yml Normal file (1 line changed)

@@ -0,0 +1 @@
service_name: "travis-ci"


@@ -1,34 +0,0 @@
name: flake8
concurrency:
group: ${{ github.ref }}
cancel-in-progress: true
on:
workflow_dispatch:
push:
tags:
- "*"
branches:
- main
- master
- develop
- "release/*"
pull_request:
jobs:
flake8-lint:
runs-on: ubuntu-24.04
name: Lint
steps:
- name: Check out source repository
uses: actions/checkout@v4
- name: Set up Python environment
uses: actions/setup-python@v5
with:
python-version: "3.13"
- name: flake8 Lint
uses: reviewdog/action-flake8@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
reporter: github-pr-review


@@ -1,65 +0,0 @@
name: Tests and Codecov
on:
push:
branches:
- master
- main
- "release/*"
pull_request:
workflow_dispatch:
jobs:
run_tests:
runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:
python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', 'pypy-3.10']
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install uv
uv pip install --system tox tox-gh-actions
- name: Test with tox
run: tox
coverage_report:
needs: run_tests
runs-on: ubuntu-24.04
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up Python 3.13
uses: actions/setup-python@v5
with:
python-version: 3.13
- name: Install dependencies
run: |
python -m pip install uv
uv pip install --system poetry
uv pip install --system .[dev]
- name: Calculate coverage
run: poetry run coverage run --source=pipreqs -m unittest discover
- name: Create XML report
run: poetry run coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
files: coverage.xml
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: false


@@ -1,96 +0,0 @@
ci:
autoupdate_commit_msg: "chore: update pre-commit hooks"
autofix_commit_msg: "style: pre-commit fixes"
autoupdate_schedule: quarterly
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-added-large-files
args: [ '--maxkb=1000' ]
- id: check-case-conflict
- id: check-merge-conflict
- id: check-symlinks
- id: check-yaml
- id: check-toml
- id: check-json
- id: debug-statements
- id: end-of-file-fixer
- id: mixed-line-ending
- id: requirements-txt-fixer
- id: trailing-whitespace
files: ".*\\.(?:tex|py)$"
args: [ --markdown-linebreak-ext=md ]
exclude: (^notebooks/|^tests/truth/)
- id: detect-private-key
- id: fix-byte-order-marker
- id: check-ast
- id: check-docstring-first
- id: debug-statements
- repo: https://github.com/pre-commit/pygrep-hooks
rev: v1.10.0
hooks:
- id: python-use-type-annotations
- id: python-check-mock-methods
- id: python-no-eval
- id: rst-backticks
- id: rst-directive-colons
- repo: https://github.com/asottile/pyupgrade
rev: v3.3.1
hooks:
- id: pyupgrade
args: [ --py38-plus ]
# Notebook formatting
- repo: https://github.com/nbQA-dev/nbQA
rev: 1.9.1
hooks:
- id: nbqa-isort
additional_dependencies: [ isort ]
- id: nbqa-pyupgrade
additional_dependencies: [ pyupgrade ]
args: [ --py38-plus ]
- repo: https://github.com/kynan/nbstripout
rev: 0.8.1
hooks:
- id: nbstripout
- repo: https://github.com/sondrelg/pep585-upgrade
rev: 'v1.0'
hooks:
- id: upgrade-type-hints
args: [ '--futures=true' ]
- repo: https://github.com/MarcoGorelli/auto-walrus
rev: 0.3.4
hooks:
- id: auto-walrus
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.30.0
hooks:
- id: check-github-workflows
- id: check-github-actions
- id: check-dependabot
- id: check-readthedocs
- repo: https://github.com/dannysepler/rm_unneeded_f_str
rev: v0.2.0
hooks:
- id: rm-unneeded-f-str
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.8.6"
hooks:
- id: ruff
types_or: [ python, pyi, jupyter ]
args: [ --fix, --show-fixes , --line-length=120 ] # --unsafe-fixes,
# Run the formatter.
- id: ruff-format
types_or: [ python, pyi, jupyter ]


@@ -1,7 +0,0 @@
3.13
3.12
3.11
3.10
3.9
3.8
pypy3.9-7.3.12


@@ -1 +0,0 @@
python 3.13 3.12 3.11 3.10 3.9 3.8 pypy3.9-7.3.12

.travis.yml Normal file (21 lines changed)

@@ -0,0 +1,21 @@
# Config file for automatic testing at travis-ci.org
language: python
python:
- "3.4"
- "3.3"
- "2.7"
- "2.6"
- "pypy"
# command to install dependencies, e.g. pip install -r requirements.txt --use-mirrors
install:
- "pip install -r requirements.txt"
- "pip install coverage"
- "pip install coveralls"
# command to run tests, e.g. python setup.py test
script: coverage run --source=pipreqs setup.py test
after_success:
coveralls


@@ -10,5 +10,4 @@ Development Lead
Contributors
------------
* Jake Teo <mapattacker@gmail.com>
* Jerome Chan <cjerome94@gmail.com>
None yet. Why not be the first?


@@ -61,11 +61,12 @@ Ready to contribute? Here's how to set up `pipreqs` for local development.
2. Clone your fork locally::
$ git clone git@github.com:your_name_here/pipreqs.git
3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::
$ mkvirtualenv pipreqs
$ cd pipreqs/
3. Pipreqs is developed using Poetry. Refer to the `documentation <https://python-poetry.org/docs/>`_ to install Poetry in your local environment. Next, you should install pipreqs's dependencies::
$ poetry install --with dev
$ python setup.py develop
4. Create a branch for local development::
@@ -75,11 +76,11 @@ Ready to contribute? Here's how to set up `pipreqs` for local development.
5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox::
$ poetry run flake8 pipreqs tests
$ poetry run python -m unittest discover
$ poetry run tox
To test all versions of Python using tox, you need to have them installed; two options are recommended for this: `pyenv` or `asdf`.
$ flake8 pipreqs tests
$ python setup.py test
$ tox
To get flake8 and tox, just pip install them into your virtualenv.
6. Commit your changes and push your branch to GitHub::
@@ -98,13 +99,13 @@ Before you submit a pull request, check that it meets these guidelines:
2. If the pull request adds functionality, the docs should be updated. Put
your new functionality into a function with a docstring, and add the
feature to the list in README.rst.
3. The pull request should work for currently supported Python and PyPy versions. Check
https://travis-ci.org/bndr/pipreqs/pull_requests and make sure that the
tests pass for all supported Python versions.
3. The pull request should work for Python 2.6, 2.7, 3.3, and 3.4, and for PyPy. Check
https://travis-ci.org/bndr/pipreqs/pull_requests
and make sure that the tests pass for all supported Python versions.
Tips
----
To run a subset of tests::
$ poetry run python -m unittest tests.test_pipreqs
$ python -m unittest tests.test_pipreqs


@@ -3,34 +3,6 @@
History
-------
0.4.11 (2020-03-29)
--------------------
* Implement '--mode' (Jake Teo, Jerome Chan)
0.4.8 (2017-06-30)
--------------------
* Implement '--clean' and '--diff' (kxrd)
* Exclude concurrent{,.futures} from stdlib if py2 (kxrd)
0.4.7 (2017-04-20)
--------------------
* BUG: remove package/version duplicates
* Style: pep8
0.4.5 (2016-12-13)
---------------------
* Fixed the --pypi-server option
0.4.4 (2016-07-14)
---------------------
* Remove Spaces in output
* Add package to output even without version
0.4.2 (2016-02-10)
---------------------

MANIFEST.in Normal file (13 lines changed)

@@ -0,0 +1,13 @@
include AUTHORS.rst
include CONTRIBUTING.rst
include HISTORY.rst
include LICENSE
include README.rst
include pipreqs/stdlib
include pipreqs/mapping
recursive-include tests *
recursive-exclude * __pycache__
recursive-exclude * *.py[co]
recursive-include docs *.rst conf.py Makefile make.bat stdlib mapping


@@ -6,14 +6,13 @@ help:
@echo "clean-pyc - remove Python file artifacts"
@echo "clean-test - remove test and coverage artifacts"
@echo "lint - check style with flake8"
@echo "test - run tests quickly using the default Python"
@echo "test - run tests quickly with the default Python"
@echo "test-all - run tests on every Python version with tox"
@echo "coverage - check code coverage quickly with the default Python"
@echo "docs - generate Sphinx HTML documentation, including API docs"
@echo "publish - package and upload a release"
@echo "publish-to-test - package and upload a release to test-pypi"
@echo "build - build the package"
@echo "install - install the dependencies into the Poetry virtual environment"
@echo "release - package and upload a release"
@echo "dist - package"
@echo "install - install the package to the active Python's site-packages"
clean: clean-build clean-pyc clean-test
@@ -36,13 +35,14 @@ clean-test:
rm -fr htmlcov/
lint:
poetry run flake8 pipreqs tests
flake8 pipreqs tests
test:
poetry run python -m unittest discover
pip install -r requirements.txt
python setup.py test
test-all:
poetry run tox
tox
coverage:
coverage run --source pipreqs setup.py test
@@ -58,14 +58,13 @@ docs:
$(MAKE) -C docs html
open docs/_build/html/index.html
publish: build
poetry publish
release: clean
python setup.py sdist bdist_wheel upload -r pypi
publish-to-test: build
poetry publish --repository test-pypi
build: clean
poetry build
dist: clean
python setup.py sdist
python setup.py bdist_wheel
ls -l dist
install: clean
poetry install --with dev
python setup.py install


@@ -1,72 +1,53 @@
=============================================================================
===============================
``pipreqs`` - Generate requirements.txt file for any project based on imports
=============================================================================
.. image:: https://github.com/bndr/pipreqs/actions/workflows/tests.yml/badge.svg
:target: https://github.com/bndr/pipreqs/actions/workflows/tests.yml
===============================
.. image:: https://img.shields.io/travis/bndr/pipreqs.svg
:target: https://travis-ci.org/bndr/pipreqs
.. image:: https://img.shields.io/pypi/v/pipreqs.svg
:target: https://pypi.python.org/pypi/pipreqs
.. image:: https://codecov.io/gh/bndr/pipreqs/branch/master/graph/badge.svg?token=0rfPfUZEAX
:target: https://codecov.io/gh/bndr/pipreqs
.. image:: https://img.shields.io/pypi/l/pipreqs.svg
.. image:: https://img.shields.io/pypi/dm/pipreqs.svg
:target: https://pypi.python.org/pypi/pipreqs
.. image:: https://img.shields.io/coveralls/bndr/pipreqs.svg
:target: https://coveralls.io/r/bndr/pipreqs
.. image:: https://img.shields.io/pypi/l/pipreqs.svg
:target: https://pypi.python.org/pypi/pipreqs
Installation
------------
.. code-block:: sh
::
pip install pipreqs
Note: if you don't want support for Jupyter notebooks, you can install pipreqs without the dependencies that provide it.
To do so, run:
.. code-block:: sh
pip install --no-deps pipreqs
pip install yarg==0.1.9 docopt==0.6.2
Usage
-----
::
Usage:
pipreqs [options] [<path>]
Arguments:
<path> The path to the directory containing the application files for which a requirements file
should be generated (defaults to the current working directory)
pipreqs [options] <path>
Options:
--use-local Use ONLY local package info instead of querying PyPI
--pypi-server <url> Use custom PyPi server
--proxy <url> Use Proxy, parameter will be passed to requests library. You can also just set the
--pypi-server Use custom PyPi server
--proxy Use Proxy, parameter will be passed to requests library. You can also just set the
environment variable in your terminal:
$ export HTTP_PROXY="http://10.10.1.10:3128"
$ export HTTPS_PROXY="https://10.10.1.10:1080"
--debug Print debug information
--ignore <dirs>... Ignore extra directories, each separated by a comma
--no-follow-links Do not follow symbolic links in the project
--ignore-errors Ignore errors while scanning files
--ignore <dirs>... Ignore extra directories
--encoding <charset> Use encoding parameter for file open
--savepath <file> Save the list of requirements in the given file
--print Output the list of requirements in the standard output
--force Overwrite existing requirements.txt
--diff <file> Compare modules in requirements.txt to project imports
--clean <file> Clean up requirements.txt by removing modules that are not imported in project
--mode <scheme> Enables dynamic versioning with <compat>, <gt> or <no-pin> schemes
<compat> | e.g. Flask~=1.1.2
<gt> | e.g. Flask>=1.1.2
<no-pin> | e.g. Flask
--scan-notebooks Look for imports in jupyter notebook files.
Example
-------
@@ -83,10 +64,10 @@ Contents of requirements.txt
wheel==0.23.0
Yarg==0.1.9
docopt==0.6.2
Why not pip freeze?
-------------------
- ``pip freeze`` only saves the packages that are installed with ``pip install`` in your environment.
- ``pip freeze`` saves all packages in the environment including those that you don't use in your current project (if you don't have ``virtualenv``).
- and sometimes you just need to create ``requirements.txt`` for a new project without installing modules.
- ``pip freeze`` only saves the packages that are installed with ``pip install`` in your environment.
- pip freeze saves all packages in the environment including those that you don't use in your current project. (if you don't have virtualenv)
- and sometimes you just need to create requirements.txt for a new project without installing modules.
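As an aside on the --mode schemes documented in the master-side README above, a minimal sketch of the resulting pins, assuming Flask is detected at version 1.1.2. pin() is an illustrative helper, not a pipreqs function; the scheme-to-specifier mapping mirrors the dynamic_versioning function that appears later in this diff:

    def pin(name, version, scheme=None):
        # "compat" -> Flask~=1.1.2, "gt" -> Flask>=1.1.2, "no-pin" -> Flask;
        # with no scheme the default is an exact pin, Flask==1.1.2
        symbols = {"compat": "~=", "gt": ">=", "no-pin": ""}
        symbol = symbols.get(scheme, "==")
        return name if not symbol else f"{name}{symbol}{version}"

    print(pin("Flask", "1.1.2", "compat"))  # Flask~=1.1.2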


@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
__author__ = 'Vadim Kravcenko'
__email__ = 'vadim.kravcenko@gmail.com'
__version__ = '0.4.13'
__version__ = '0.4.3'


@@ -1,4 +1,3 @@
AFQ:pyAFQ
AG_fft_tools:agpy
ANSI:pexpect
Adafruit:Adafruit_Libraries
@@ -10,12 +9,9 @@ BeautifulSoupTests:BeautifulSoup
BioSQL:biopython
BuildbotStatusShields:BuildbotEightStatusShields
ComputedAttribute:ExtensionClass
constraint:python-constraint
Crypto:pycryptodome
Cryptodome:pycryptodomex
Crypto:pycrypto
FSM:pexpect
FiftyOneDegrees:51degrees_mobile_detector_v3_wrapper
functional:pyfunctional
GeoBaseMain:GeoBasesDev
GeoBases:GeoBasesDev
Globals:Zope2
@@ -25,7 +21,6 @@ Kittens:astro_kittens
Levenshtein:python_Levenshtein
Lifetime:Zope2
MethodObject:ExtensionClass
MySQLdb:MySQL-python
OFS:Zope2
OpenGL:PyOpenGL
OpenSSL:pyOpenSSL
@@ -36,7 +31,6 @@ Pyxides:astro_pyxis
QtCore:PySide
S3:s3cmd
SCons:pystick
speech_recognition:SpeechRecognition
Shared:Zope2
Signals:Zope2
Stemmer:PyStemmer
@@ -131,7 +125,6 @@ aios3:aio_s3
airbrake:airbrake_flask
airship:airship_icloud
airship:airship_steamcloud
airflow:apache-airflow
akamai:edgegrid_python
alation:alation_api
alba_client:alba_client_python
@@ -270,6 +263,7 @@ armstrong:armstrong.hatband
armstrong:armstrong.templates.standard
armstrong:armstrong.utils.backends
armstrong:armstrong.utils.celery
arrow:arrow_fatisar
arstecnica:arstecnica.raccoon.autobahn
arstecnica:arstecnica.sqlalchemy.async
article-downloader:article_downloader
@@ -542,7 +536,6 @@ cassandra:cassandra_driver
cassandralauncher:CassandraLauncher
cc42:42qucc
cerberus:Cerberus
cfnlint:cfn-lint
chameleon:Chameleon
charmtools:charm_tools
chef:PyChef
@@ -583,23 +576,19 @@ ctff:tff
cups:pycups
curator:elasticsearch_curator
curl:pycurl
cv2:opencv-python
daemon:python_daemon
dare:DARE
dateutil:python_dateutil
dawg:DAWG
deb822:python_debian
debian:python_debian
decouple:python-decouple
demo:webunit
demosongs:PySynth
deployer:juju_deployer
depot:filedepot
devtools:tg.devtools
dgis:2gis
dhtmlparser:pyDHTMLParser
digitalocean:python_digitalocean
discord:discord.py
distribute_setup:ez_setup
distutils2:Distutils2
django:Django
@@ -617,7 +606,6 @@ dogshell:dogapi
dot_parser:pydot
dot_parser:pydot2
dot_parser:pydot3k
dotenv:python-dotenv
dpkt:dpkt_fix
dsml:python_ldap
durationfield:django_durationfield
@@ -683,7 +671,6 @@ geventwebsocket:gevent_websocket
gflags:python_gflags
git:GitPython
github:PyGithub
github3:github3.py
gitpy:git_py
globusonline:globusonline_transfer_api_client
google:protobuf
@@ -710,7 +697,6 @@ html:pies2overrides
htmloutput:nosehtmloutput
http:pies2overrides
hvad:django_hvad
hydra:hydra-core
i99fix:199Fix
igraph:python_igraph
imdb:IMDbPY
@@ -722,7 +708,6 @@ jaraco:jaraco.util
jinja2:Jinja2
jiracli:jira_cli
johnny:johnny_cache
jose:python_jose
jpgrid:python_geohash
jpiarea:python_geohash
jpype:JPype1
@@ -737,7 +722,6 @@ keyczar:python_keyczar
keyedcache:django_keyedcache
keystoneclient:python_keystoneclient
kickstarter:kickstart
krbv:krbV
kss:kss.core
kuyruk:Kuyruk
langconv:AdvancedLangConv
@@ -786,8 +770,6 @@ mimeparse:python_mimeparse
minitage:minitage.paste
minitage:minitage.recipe.common
missingdrawables:android_missingdrawables
mixfiles:PySynth
mkfreq:PySynth
mkrst_themes:2lazy2rest
mockredis:mockredispy
modargs:python_modargs
@@ -803,7 +785,6 @@ monthdelta:MonthDelta
mopidy:Mopidy
mopytools:MoPyTools
mptt:django_mptt
mpv:python-mpv
mrbob:mr.bob
msgpack:msgpack_python
mutations:aino_mutations
@@ -818,7 +799,7 @@ nester:abofly
nester:bssm_pythonSig
novaclient:python_novaclient
oauth2_provider:alauda_django_oauth
oauth2client:oauth2client
oauth2client:google_api_python_client
odf:odfpy
ometa:Parsley
openid:python_openid
@@ -839,14 +820,12 @@ past:future
paste:PasteScript
path:forked_path
path:path.py
patricia:patricia-trie
paver:Paver
peak:ProxyTypes
picasso:anderson.picasso
picklefield:django-picklefield
pilot:BigJob
pivotal:pivotal_py
play_wav:PySynth
playhouse:peewee
plivoxml:plivo
plone:plone.alterego
@@ -930,9 +909,9 @@ plone:plone.z3cform
plonetheme:plonetheme.barceloneta
png:pypng
polymorphic:django_polymorphic
portalocker:ConcurrentLogHandler
postmark:python_postmark
powerprompt:bash_powerprompt
prefetch:django-prefetch
printList:AndrewList
progressbar:progressbar2
progressbar:progressbar33
@@ -967,18 +946,9 @@ pyrimaa:AEI
pysideuic:PySide
pysqlite2:adhocracy_pysqlite
pysqlite2:pysqlite
pysynth_b:PySynth
pysynth_beeper:PySynth
pysynth_c:PySynth
pysynth_d:PySynth
pysynth_e:PySynth
pysynth_p:PySynth
pysynth_s:PySynth
pysynth_samp:PySynth
pythongettext:python_gettext
pythonjsonlogger:python_json_logger
pyutilib:PyUtilib
pywintypes:pywin32
pyximport:Cython
qs:qserve
quadtree:python_geohash
@@ -1010,7 +980,6 @@ ruamel:ruamel.base
s2repoze:pysaml2
saga:saga_python
saml2:pysaml2
samtranslator:aws-sam-translator
sass:libsass
sassc:libsass
sasstests:libsass
@@ -1034,12 +1003,10 @@ singleton:pysingleton
sittercommon:cerebrod
skbio:scikit_bio
sklearn:scikit_learn
slack:slackclient
slugify:unicode_slugify
slugify:python-slugify
smarkets:smk_python_sdk
snappy:ctypes_snappy
socketio:python-socketio
socketio:gevent_socketio
socketserver:pies2overrides
sockjs:sockjs_tornado
socks:SocksiPy_branch
@@ -1068,7 +1035,6 @@ tasksitter:cerebrod
tastypie:django_tastypie
teamcity:teamcity_messages
telebot:pyTelegramBotAPI
telegram:python-telegram-bot
tempita:Tempita
tenjin:Tenjin
termstyle:python_termstyle
@@ -1153,4 +1119,4 @@ z3c:z3c.relationfield
z3c:z3c.traverser
z3c:z3c.zcmlhook
zmq:pyzmq
zopyx:zopyx.textindexng3
zopyx:zopyx.textindexng3
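The mapping file above is a plain-text lookup table, one import_name:pypi_package pair per line. A minimal sketch of consuming it, mirroring the dict-based lookup in the master-side get_pkg_names later in this diff; load_mapping() is an illustrative helper and the path is an assumption:

    def load_mapping(path="pipreqs/mapping"):
        # each line is "import_name:pypi_package", e.g. "cv2:opencv-python"
        with open(path) as f:
            return dict(line.strip().split(":") for line in f if line.strip())

    mapping = load_mapping()
    # import names with no entry fall back to themselves
    print(mapping.get("cv2", "cv2"))  # "opencv-python" on the master side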

pipreqs/pipreqs.py Normal file → Executable file (597 lines changed)

@@ -1,48 +1,29 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""pipreqs - Generate pip requirements.txt file based on imports
Usage:
pipreqs [options] [<path>]
Arguments:
<path> The path to the directory containing the application
files for which a requirements file should be
generated (defaults to the current working
directory).
pipreqs [options] <path>
Options:
--use-local Use ONLY local package info instead of querying PyPI.
--pypi-server <url> Use custom PyPi server.
--proxy <url> Use Proxy, parameter will be passed to requests
library. You can also just set the environment
variable in your terminal:
--use-local Use ONLY local package info instead of querying PyPI
--pypi-server Use custom PyPi server
--proxy Use Proxy, parameter will be passed to requests library. You can also just set the
environment variable in your terminal:
$ export HTTP_PROXY="http://10.10.1.10:3128"
$ export HTTPS_PROXY="https://10.10.1.10:1080"
--debug Print debug information
--ignore <dirs>... Ignore extra directories, each separated by a comma
--ignore-errors Ignore errors while scanning files
--no-follow-links Do not follow symbolic links in the project
--encoding <charset> Use encoding parameter for file open
--savepath <file> Save the list of requirements in the given file
--print Output the list of requirements in the standard
output
--force Overwrite existing requirements.txt
--diff <file> Compare modules in requirements.txt to project
imports
--clean <file> Clean up requirements.txt by removing modules
that are not imported in project
--mode <scheme> Enables dynamic versioning with <compat>,
<gt> or <no-pin> schemes.
<compat> | e.g. Flask~=1.1.2
<gt> | e.g. Flask>=1.1.2
<no-pin> | e.g. Flask
--scan-notebooks Look for imports in jupyter notebook files.
"""
from contextlib import contextmanager
from __future__ import print_function, absolute_import
import os
import sys
import re
import logging
import codecs
import ast
import traceback
from docopt import docopt
@@ -52,67 +33,23 @@ from yarg.exceptions import HTTPError
from pipreqs import __version__
REGEXP = [re.compile(r"^import (.+)$"), re.compile(r"^from ((?!\.+).*?) import (?:.*)$")]
DEFAULT_EXTENSIONS = [".py", ".pyw"]
REGEXP = [
re.compile(r'^import (.+)$'),
re.compile(r'^from ((?!\.+).*?) import (?:.*)$')
]
scan_noteboooks = False
if sys.version_info[0] > 2:
open_func = open
else:
open_func = codecs.open
class NbconvertNotInstalled(ImportError):
default_message = (
"In order to scan jupyter notebooks, please install the nbconvert and ipython libraries"
)
def __init__(self, message=default_message):
super().__init__(message)
@contextmanager
def _open(filename=None, mode="r"):
"""Open a file or ``sys.stdout`` depending on the provided filename.
Args:
filename (str): The path to the file that should be opened. If
``None`` or ``'-'``, ``sys.stdout`` or ``sys.stdin`` is
returned depending on the desired mode. Defaults to ``None``.
mode (str): The mode that should be used to open the file.
Yields:
A file handle.
"""
if not filename or filename == "-":
if not mode or "r" in mode:
file = sys.stdin
elif "w" in mode:
file = sys.stdout
else:
raise ValueError("Invalid mode for file: {}".format(mode))
else:
file = open(filename, mode)
try:
yield file
finally:
if file not in (sys.stdin, sys.stdout):
file.close()
def get_all_imports(path, encoding="utf-8", extra_ignore_dirs=None, follow_links=True, ignore_errors=False):
def get_all_imports(path, encoding=None, extra_ignore_dirs=None):
imports = set()
raw_imports = set()
candidates = []
ignore_dirs = [
".hg",
".svn",
".git",
".tox",
"__pycache__",
"env",
"venv",
".venv",
".ipynb_checkpoints",
]
ignore_errors = False
ignore_dirs = [".hg", ".svn", ".git", "__pycache__", "env", "venv"]
if extra_ignore_dirs:
ignore_dirs_parsed = []
@@ -120,111 +57,65 @@ def get_all_imports(path, encoding="utf-8", extra_ignore_dirs=None, follow_links
ignore_dirs_parsed.append(os.path.basename(os.path.realpath(e)))
ignore_dirs.extend(ignore_dirs_parsed)
extensions = get_file_extensions()
walk = os.walk(path, followlinks=follow_links)
for root, dirs, files in walk:
for root, dirs, files in os.walk(path):
dirs[:] = [d for d in dirs if d not in ignore_dirs]
candidates.append(os.path.basename(root))
py_files = [file for file in files if file_ext_is_allowed(file, DEFAULT_EXTENSIONS)]
candidates.extend([os.path.splitext(filename)[0] for filename in py_files])
files = [fn for fn in files if file_ext_is_allowed(fn, extensions)]
files = [fn for fn in files if os.path.splitext(fn)[1] == ".py"]
candidates += [os.path.splitext(fn)[0] for fn in files]
for file_name in files:
file_name = os.path.join(root, file_name)
with open_func(os.path.join(root, file_name), "r", encoding=encoding) as f:
contents = f.read()
try:
tree = ast.parse(contents)
for node in ast.walk(tree):
if isinstance(node, ast.Import):
for subnode in node.names:
raw_imports.add(subnode.name)
elif isinstance(node, ast.ImportFrom):
raw_imports.add(node.module)
except Exception as exc:
if ignore_errors:
traceback.print_exc()
logging.warn("Failed on file: %s" % os.path.join(root, file_name))
continue
else:
logging.error("Failed on file: %s" % os.path.join(root, file_name))
raise exc
try:
contents = read_file_content(file_name, encoding)
tree = ast.parse(contents)
for node in ast.walk(tree):
if isinstance(node, ast.Import):
for subnode in node.names:
raw_imports.add(subnode.name)
elif isinstance(node, ast.ImportFrom):
raw_imports.add(node.module)
except Exception as exc:
if ignore_errors:
traceback.print_exc()
logging.warn("Failed on file: %s" % file_name)
continue
else:
logging.error("Failed on file: %s" % file_name)
raise exc
# Clean up imports
for name in [n for n in raw_imports if n]:
# Sanity check: Name could have been None if the import
# statement was of the form ``from . import X``
# Sanity check: Name could have been None if the import statement was of the form from . import X
# Cleanup: We only want the first part of the import.
# Ex: from django.conf --> django.conf. But we only want django
# as an import.
cleaned_name, _, _ = name.partition(".")
# Ex: from django.conf --> django.conf. But we only want django as an import
cleaned_name, _, _ = name.partition('.')
imports.add(cleaned_name)
packages = imports - (set(candidates) & imports)
logging.debug("Found packages: {0}".format(packages))
packages = set(imports) - set(set(candidates) & set(imports))
logging.debug('Found packages: {0}'.format(packages))
with open(join("stdlib"), "r") as f:
data = {x.strip() for x in f}
return list(packages - data)
data = [x.strip() for x in f.readlines()]
return sorted(list(set(packages) - set(data)))
def get_file_extensions():
return DEFAULT_EXTENSIONS + [".ipynb"] if scan_noteboooks else DEFAULT_EXTENSIONS
def filter_line(l):
return len(l) > 0 and l[0] != "#"
def read_file_content(file_name: str, encoding="utf-8"):
if file_ext_is_allowed(file_name, DEFAULT_EXTENSIONS):
with open(file_name, "r", encoding=encoding) as f:
contents = f.read()
elif file_ext_is_allowed(file_name, [".ipynb"]) and scan_noteboooks:
contents = ipynb_2_py(file_name, encoding=encoding)
return contents
def file_ext_is_allowed(file_name, acceptable):
return os.path.splitext(file_name)[1] in acceptable
def ipynb_2_py(file_name, encoding="utf-8"):
"""
Args:
file_name (str): notebook file path to parse as python script
encoding (str): encoding of file
Returns:
str: parsed string
"""
exporter = PythonExporter()
(body, _) = exporter.from_filename(file_name)
return body.encode(encoding)
def generate_requirements_file(path, imports, symbol):
with _open(path, "w") as out_file:
logging.debug(
"Writing {num} requirements: {imports} to {file}".format(
num=len(imports), file=path, imports=", ".join([x["name"] for x in imports])
)
)
fmt = "{name}" + symbol + "{version}"
out_file.write(
"\n".join(
fmt.format(**item) if item["version"] else "{name}".format(**item)
for item in imports
)
+ "\n"
)
def output_requirements(imports, symbol):
generate_requirements_file("-", imports, symbol)
def generate_requirements_file(path, imports):
with open(path, "w") as out_file:
logging.debug('Writing {num} requirements: {imports} to {file}'.format(
num=len(imports),
file=path,
imports=", ".join([x['name'] for x in imports])
))
fmt = '{name} == {version}'
out_file.write('\n'.join(fmt.format(**item)
for item in imports) + '\n')
def get_imports_info(imports, pypi_server="https://pypi.python.org/pypi/", proxy=None):
@@ -232,115 +123,68 @@ def get_imports_info(imports, pypi_server="https://pypi.python.org/pypi/", proxy
for item in imports:
try:
logging.warning(
'Import named "%s" not found locally. ' "Trying to resolve it at the PyPI server.",
item,
)
response = requests.get("{0}{1}/json".format(pypi_server, item), proxies=proxy)
if response.status_code == 200:
if hasattr(response.content, "decode"):
if hasattr(response.content, 'decode'):
data = json2package(response.content.decode())
else:
data = json2package(response.content)
elif response.status_code >= 300:
raise HTTPError(status_code=response.status_code, reason=response.reason)
raise HTTPError(status_code=response.status_code,
reason=response.reason)
except HTTPError:
logging.warning('Package "%s" does not exist or network problems', item)
logging.debug(
'Package %s does not exist or network problems', item)
continue
logging.warning(
'Import named "%s" was resolved to "%s:%s" package (%s).\n'
"Please, verify manually the final list of requirements.txt "
"to avoid possible dependency confusions.",
item,
data.name,
data.latest_release_id,
data.pypi_url,
)
result.append({"name": item, "version": data.latest_release_id})
result.append({'name': item, 'version': data.latest_release_id})
return result
def get_locally_installed_packages(encoding="utf-8"):
packages = []
def get_locally_installed_packages(encoding=None):
packages = {}
ignore = ["tests", "_tests", "egg", "EGG", "info"]
for path in sys.path:
for root, dirs, files in os.walk(path):
for item in files:
if "top_level" in item:
item = os.path.join(root, item)
with open(item, "r", encoding=encoding) as f:
with open_func(os.path.join(root, item), "r", encoding=encoding) as f:
package = root.split(os.sep)[-1].split("-")
try:
top_level_modules = f.read().strip().split("\n")
except: # NOQA
# TODO: What errors do we intend to suppress here?
package_import = f.read().strip().split("\n")
except:
continue
# filter off explicitly ignored top-level modules
# such as test, egg, etc.
filtered_top_level_modules = list()
for module in top_level_modules:
if (module not in ignore) and (package[0] not in ignore):
# append exported top level modules to the list
filtered_top_level_modules.append(module)
version = None
if len(package) > 1:
version = package[1].replace(".dist", "").replace(".egg", "")
# append package: top_level_modules pairs
# instead of top_level_module: package pairs
packages.append(
{
"name": package[0],
"version": version,
"exports": filtered_top_level_modules,
}
)
for i_item in package_import:
if ((i_item not in ignore) and
(package[0] not in ignore)):
packages[i_item] = {
'version': package[1].replace(".dist", "").replace(".egg",""),
'name': package[0]
}
return packages
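# Aside, not part of either version above: both variants scan sys.path for
# "top_level.txt" metadata files. Assuming a hypothetical directory
# "requests-2.31.0.dist-info" whose top_level.txt holds the single line
# "requests": root.split(os.sep)[-1].split("-") yields
# ["requests", "2.31.0.dist", "info"], the ".dist" suffix is stripped from
# the version, and the master side records
#     {"name": "requests", "version": "2.31.0", "exports": ["requests"]}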
def get_import_local(imports, encoding="utf-8"):
def get_import_local(imports, encoding=None):
local = get_locally_installed_packages()
result = []
for item in imports:
# search through local packages
for package in local:
# if candidate import name matches export name
# or candidate import name equals to the package name
# append it to the result
if item in package["exports"] or item == package["name"]:
result.append(package)
# removing duplicates of package/version
# had to use second method instead of the previous one,
# because we have a list in the 'exports' field
# https://stackoverflow.com/questions/9427163/remove-duplicate-dict-in-list-in-python
result_unique = [i for n, i in enumerate(result) if i not in result[n + 1:]]
return result_unique
if item.lower() in local:
result.append(local[item.lower()])
return result
def get_pkg_names(pkgs):
"""Get PyPI package names from a list of imports.
Args:
pkgs (List[str]): List of import names.
Returns:
List[str]: The corresponding PyPI package names.
"""
result = set()
result = []
with open(join("mapping"), "r") as f:
data = dict(x.strip().split(":") for x in f)
for pkg in pkgs:
# Look up the mapped requirement. If a mapping isn't found,
# simply use the package name.
result.add(data.get(pkg, pkg))
# Return a sorted list for backward compatibility.
return sorted(result, key=lambda s: s.lower())
data = [x.strip().split(":") for x in f.readlines()]
for pkg in pkgs:
toappend = pkg
for item in data:
if item[0] == pkg:
toappend = item[1]
break
if toappend not in result:
result.append(toappend)
return result
def get_name_without_alias(name):
@@ -348,197 +192,23 @@ def get_name_without_alias(name):
match = REGEXP[0].match(name.strip())
if match:
name = match.groups(0)[0]
return name.partition(" as ")[0].partition(".")[0].strip()
return name.partition(' as ')[0].partition('.')[0].strip()
def join(f):
return os.path.join(os.path.dirname(__file__), f)
def parse_requirements(file_):
"""Parse a requirements formatted file.
Traverse a string until a delimiter is detected, then split at said
delimiter, get module name by element index, create a dict consisting of
module:version, and add dict to list of parsed modules.
If file ``file_`` is not found in the system, the program will print a
helpful message and end its execution immediately.
Args:
file_: File to parse.
Raises:
OSError: If there are any issues accessing the file.
Returns:
list: The contents of the file, excluding comments.
"""
modules = []
# For the dependency identifier specification, see
# https://www.python.org/dev/peps/pep-0508/#complete-grammar
delim = ["<", ">", "=", "!", "~"]
try:
f = open(file_, "r")
except FileNotFoundError:
print(f"File {file_} was not found. Please, fix it and run again.")
sys.exit(1)
except OSError as error:
logging.error(f"There was an error opening the file {file_}: {str(error)}")
raise error
else:
try:
data = [x.strip() for x in f.readlines() if x != "\n"]
finally:
f.close()
data = [x for x in data if x[0].isalpha()]
for x in data:
# Check for modules w/o a specifier.
if not any([y in x for y in delim]):
modules.append({"name": x, "version": None})
for y in x:
if y in delim:
module = x.split(y)
module_name = module[0]
module_version = module[-1].replace("=", "")
module = {"name": module_name, "version": module_version}
if module not in modules:
modules.append(module)
break
return modules
def compare_modules(file_, imports):
"""Compare modules in a file to imported modules in a project.
Args:
file_ (str): File to parse for modules to be compared.
imports (tuple): Modules being imported in the project.
Returns:
set: The modules not imported in the project, but do exist in the
specified file.
"""
modules = parse_requirements(file_)
imports = [imports[i]["name"] for i in range(len(imports))]
modules = [modules[i]["name"] for i in range(len(modules))]
modules_not_imported = set(modules) - set(imports)
return modules_not_imported
def diff(file_, imports):
"""Display the difference between modules in a file and imported modules.""" # NOQA
modules_not_imported = compare_modules(file_, imports)
logging.info(
"The following modules are in {} but do not seem to be imported: "
"{}".format(file_, ", ".join(x for x in modules_not_imported))
)
def clean(file_, imports):
"""Remove modules that aren't imported in project from file."""
modules_not_imported = compare_modules(file_, imports)
if len(modules_not_imported) == 0:
logging.info("Nothing to clean in " + file_)
return
re_remove = re.compile("|".join(modules_not_imported))
to_write = []
try:
f = open(file_, "r+")
except OSError:
logging.error("Failed on file: {}".format(file_))
raise
else:
try:
for i in f.readlines():
if re_remove.match(i) is None:
to_write.append(i)
f.seek(0)
f.truncate()
for i in to_write:
f.write(i)
finally:
f.close()
logging.info("Successfully cleaned up requirements in " + file_)
def dynamic_versioning(scheme, imports):
"""Enables dynamic versioning with <compat>, <gt> or <non-pin> schemes."""
if scheme == "no-pin":
imports = [{"name": item["name"], "version": ""} for item in imports]
symbol = ""
elif scheme == "gt":
symbol = ">="
elif scheme == "compat":
symbol = "~="
return imports, symbol
def handle_scan_noteboooks():
if not scan_noteboooks:
logging.info("Not scanning for jupyter notebooks.")
return
try:
global PythonExporter
from nbconvert import PythonExporter
except ImportError:
raise NbconvertNotInstalled()
def init(args):
global scan_noteboooks
encoding = args.get("--encoding")
extra_ignore_dirs = args.get("--ignore")
follow_links = not args.get("--no-follow-links")
ignore_errors = args.get("--ignore-errors")
scan_noteboooks = args.get("--scan-notebooks", False)
handle_scan_noteboooks()
input_path = args["<path>"]
if encoding is None:
encoding = "utf-8"
if input_path is None:
input_path = os.path.abspath(os.curdir)
encoding = args.get('--encoding')
extra_ignore_dirs = args.get('--ignore')
if extra_ignore_dirs:
extra_ignore_dirs = extra_ignore_dirs.split(",")
extra_ignore_dirs = extra_ignore_dirs.split(',')
path = (
args["--savepath"] if args["--savepath"] else os.path.join(input_path, "requirements.txt")
)
if (
not args["--print"]
and not args["--savepath"]
and not args["--force"]
and os.path.exists(path)
):
logging.warning("requirements.txt already exists, " "use --force to overwrite it")
return
candidates = get_all_imports(
input_path,
encoding=encoding,
extra_ignore_dirs=extra_ignore_dirs,
follow_links=follow_links,
ignore_errors=ignore_errors,
)
candidates = get_all_imports(args['<path>'],
encoding=encoding,
extra_ignore_dirs = extra_ignore_dirs)
candidates = get_pkg_names(candidates)
logging.debug("Found imports: " + ", ".join(candidates))
pypi_server = "https://pypi.python.org/pypi/"
@@ -547,66 +217,37 @@ def init(args):
pypi_server = args["--pypi-server"]
if args["--proxy"]:
proxy = {"http": args["--proxy"], "https": args["--proxy"]}
proxy = {'http': args["--proxy"], 'https': args["--proxy"]}
if args["--use-local"]:
logging.debug("Getting package information ONLY from local installation.")
logging.debug(
"Getting package information ONLY from local installation.")
imports = get_import_local(candidates, encoding=encoding)
else:
logging.debug("Getting packages information from Local/PyPI")
local = get_import_local(candidates, encoding=encoding)
# Get packages that were not found locally
difference = [x for x in candidates
if x.lower() not in [z['name'].lower() for z in local]]
imports = local + get_imports_info(difference,
proxy=proxy,
pypi_server=pypi_server)
# a candidate goes into `difference` when it is found neither in the
# list of modules exported by locally installed packages nor in the
# list of local package names
difference = [
x
for x in candidates
if
# aggregate all export lists into one
# flatten the list
# check if candidate is in exports
x.lower() not in [y for x in local for y in x["exports"]] and
# check if candidate is package names
x.lower() not in [x["name"] for x in local]
]
path = (args["--savepath"] if args["--savepath"] else
os.path.join(args['<path>'], "requirements.txt"))
imports = local + get_imports_info(difference, proxy=proxy, pypi_server=pypi_server)
# sort imports based on lowercase name of package, similar to `pip freeze`.
imports = sorted(imports, key=lambda x: x["name"].lower())
if args["--diff"]:
diff(args["--diff"], imports)
if not args["--savepath"] and not args["--force"] and os.path.exists(path):
logging.warning("Requirements.txt already exists, "
"use --force to overwrite it")
return
if args["--clean"]:
clean(args["--clean"], imports)
return
if args["--mode"]:
scheme = args.get("--mode")
if scheme in ["compat", "gt", "no-pin"]:
imports, symbol = dynamic_versioning(scheme, imports)
else:
raise ValueError(
"Invalid argument for mode flag, " "use 'compat', 'gt' or 'no-pin' instead"
)
else:
symbol = "=="
if args["--print"]:
output_requirements(imports, symbol)
logging.info("Successfully output requirements")
else:
generate_requirements_file(path, imports, symbol)
logging.info("Successfully saved requirements file in " + path)
generate_requirements_file(path, imports)
logging.info("Successfully saved requirements file in " + path)
def main(): # pragma: no cover
args = docopt(__doc__, version=__version__)
log_level = logging.DEBUG if args["--debug"] else logging.INFO
logging.basicConfig(level=log_level, format="%(levelname)s: %(message)s")
log_level = logging.DEBUG if args['--debug'] else logging.INFO
logging.basicConfig(level=log_level, format='%(levelname)s: %(message)s')
try:
init(args)
@@ -614,5 +255,5 @@ def main():  # pragma: no cover
sys.exit(0)
if __name__ == "__main__":
if __name__ == '__main__':
main() # pragma: no cover
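For orientation, a hedged sketch of driving the master-side module above programmatically. The function names and signatures come from this diff; the project path is illustrative, and pipreqs documents only its CLI, not a stable Python API:

    from pipreqs import pipreqs

    # collect top-level import names from a project tree
    candidates = pipreqs.get_all_imports("/path/to/project", encoding="utf-8")

    # translate import names to PyPI distribution names via the bundled mapping file
    packages = pipreqs.get_pkg_names(candidates)

    # resolve versions from PyPI; get_import_local() would use local metadata instead
    imports = pipreqs.get_imports_info(packages)
    pipreqs.generate_requirements_file("requirements.txt", imports, "==")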

File diff suppressed because it is too large

poetry.lock generated (2021 lines changed)

File diff suppressed because it is too large


@@ -1,2 +0,0 @@
[virtualenvs]
prefer-active-python = true


@@ -1,53 +0,0 @@
[project]
name = "pipreqs"
version = "0.5.0"
description = "Pip requirements.txt generator based on imports in project"
authors = [
{ name = "Vadim Kravcenko", email = "vadim.kravcenko@gmail.com" }
]
maintainers = [
{name = "Jonas Eschle", email = "jonas.eschle@gmail.com"}
]
license = "Apache-2.0"
readme = "README.rst"
packages = [{ include = "pipreqs" }]
repository = "https://github.com/bndr/pipreqs"
keywords = ["pip", "requirements", "imports"]
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
]
requires-python = ">=3.9, <3.14"
dependencies = [
"yarg>=0.1.9",
"docopt>=0.6.2",
"nbconvert>=7.11.0",
"ipython>=8.12.3",
]
[project.optional-dependencies]
dev = [
"flake8>=6.1.0",
"tox>=4.11.3",
"coverage>=7.3.2",
"sphinx>=7.2.6;python_version>='3.9'",
]
[tool.poetry.group.dev.dependencies] # for legacy usage
flake8 = "^6.1.0"
tox = "^4.11.3"
coverage = "^7.3.2"
sphinx = { version = "^7.2.6", python = ">=3.9" }
[project.scripts]
pipreqs = "pipreqs.pipreqs:main"
[build-system]
requires = ["poetry-core>=2.0.0,<3.0.0"]
build-backend = "poetry.core.masonry.api"

requirements.txt Normal file (3 lines changed)

@@ -0,0 +1,3 @@
wheel==0.23.0
Yarg==0.1.9
docopt==0.6.2

setup.cfg Normal file (2 lines changed)

@@ -0,0 +1,2 @@
[wheel]
universal = 1

setup.py Executable file (60 lines changed)

@@ -0,0 +1,60 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from pipreqs import __version__
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
requirements = [
'docopt', 'yarg'
]
setup(
name='pipreqs',
version=__version__,
description="Pip requirements.txt generator based on imports in project",
long_description=readme + '\n\n' + history,
author="Vadim Kravcenko",
author_email='vadim.kravcenko@gmail.com',
url='https://github.com/bndr/pipreqs',
packages=[
'pipreqs',
],
package_dir={'pipreqs':
'pipreqs'},
include_package_data=True,
package_data={'': ['stdlib','mapping']},
install_requires=requirements,
license="Apache License",
zip_safe=False,
keywords='pip requirements imports',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
test_suite='tests',
entry_points={
'console_scripts': [
'pipreqs=pipreqs.pipreqs:main',
],
},
)



@@ -1,3 +0,0 @@
pandas==2.0.0
numpy>=1.2.3
torch<4.0.0
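These three lines line up with the parsed_packages fixture in tests/test_pipreqs.py further down (by content this appears to be the imports.txt fixture referenced there as cls.imports_filepath). Under the master-side parse_requirements, each line is split at its first specifier character and the comparison operator is discarded:

    # expected parse_requirements(...) output, copied from the
    # parsed_packages fixture in tests/test_pipreqs.py below:
    [
        {"name": "pandas", "version": "2.0.0"},
        {"name": "numpy", "version": "1.2.3"},
        {"name": "torch", "version": "4.0.0"},  # "<4.0.0": operator dropped, version kept
    ]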


@@ -1,4 +0,0 @@
numpy
pandas==2.0.0
tensorflow
torch<4.0.0


@@ -1,3 +0,0 @@
pandas
tensorflow
torch


@@ -31,10 +31,6 @@ from pyflakes.test.test_imports import Test as TestImports
# Nose
from nose.importer import Importer, add_path, remove_path # loader.py
# see issue #88
import analytics
import flask_seasurf
import atexit
from __future__ import print_function
from docopt import docopt


@@ -1,65 +0,0 @@
"""unused import"""
# pylint: disable=undefined-all-variable, import-error, no-absolute-import, too-few-public-methods, missing-docstring
import xml.etree # [unused-import]
import xml.sax # [unused-import]
import os.path as test # [unused-import]
from sys import argv as test2 # [unused-import]
from sys import flags # [unused-import]
# +1:[unused-import,unused-import]
from collections import deque, OrderedDict, Counter
# All imports above should be ignored
import requests # [unused-import]
# setuptools
import zipimport # command/easy_install.py
# twisted
from importlib import invalidate_caches # python/test/test_deprecate.py
# astroid
import zipimport # manager.py
# IPython
from importlib.machinery import all_suffixes # core/completerlib.py
import importlib # html/notebookapp.py
from IPython.utils.importstring import import_item # Many files
# pyflakes
# test/test_doctests.py
from pyflakes.test.test_imports import Test as TestImports
# Nose
from nose.importer import Importer, add_path, remove_path # loader.py
# see issue #88
import analytics
import flask_seasurf
import atexit
from __future__ import print_function
from docopt import docopt
import curses, logging, sqlite3
import logging
import os
import sqlite3
import time
import sys
import signal
import bs4
import nonexistendmodule
import boto as b, peewee as p
# import django
import flask.ext.somext # # #
# from sqlalchemy import model
try:
import ujson as json
except ImportError:
import json
import models
def main():
pass
import after_method_is_valid_even_if_not_pep8
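This deleted fixture belongs to the _data family that the get_all_imports tests below exercise. A hedged sketch of the expected behaviour, based on the master-side assertions in tests/test_pipreqs.py:

    from pipreqs import pipreqs

    imports = pipreqs.get_all_imports("tests/_data")
    # the master-side test asserts 15 names, including "requests", "bs4" and
    # "after_method_is_valid_even_if_not_pep8"; stdlib modules such as "time",
    # "logging" and "curses" are filtered out via the bundled stdlib list
    assert len(imports) == 15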


@@ -1,65 +0,0 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Magic test"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%automagic true"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ls -la\n",
"logstate"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ls -la"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%automagic false"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ls -la"
]
}
],
"metadata": {
"language_info": {
"name": "python"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}


@@ -1,37 +0,0 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Markdown test\n",
"import sklearn\n",
"\n",
"```python\n",
"import FastAPI\n",
"```"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.1"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
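The markdown cell above mentions sklearn and FastAPI, yet they should not surface as requirements: notebook scanning goes through nbconvert's PythonExporter (see ipynb_2_py in the pipreqs.py diff above) and, assuming nbconvert's default template, markdown cells are rendered as comments, which ast.parse never sees as import statements. The notebook path below is illustrative:

    from nbconvert import PythonExporter  # optional pipreqs dependency

    body, _ = PythonExporter().from_filename("tests/_data_notebook/markdown_test.ipynb")
    print(body)  # markdown content arrives commented out, roughly "# import sklearn"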


@@ -1,102 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\"\"\"unused import\"\"\"\n",
"# pylint: disable=undefined-all-variable, import-error, no-absolute-import, too-few-public-methods, missing-docstring\n",
"import xml.etree # [unused-import]\n",
"import xml.sax # [unused-import]\n",
"import os.path as test # [unused-import]\n",
"from sys import argv as test2 # [unused-import]\n",
"from sys import flags # [unused-import]\n",
"# +1:[unused-import,unused-import]\n",
"from collections import deque, OrderedDict, Counter\n",
"# All imports above should be ignored\n",
"import requests # [unused-import]\n",
"\n",
"# setuptools\n",
"import zipimport # command/easy_install.py\n",
"\n",
"# twisted\n",
"from importlib import invalidate_caches # python/test/test_deprecate.py\n",
"\n",
"# astroid\n",
"import zipimport # manager.py\n",
"# IPython\n",
"from importlib.machinery import all_suffixes # core/completerlib.py\n",
"import importlib # html/notebookapp.py\n",
"\n",
"from IPython.utils.importstring import import_item # Many files\n",
"\n",
"# pyflakes\n",
"# test/test_doctests.py\n",
"from pyflakes.test.test_imports import Test as TestImports\n",
"\n",
"# Nose\n",
"from nose.importer import Importer, add_path, remove_path # loader.py\n",
"\n",
"import atexit\n",
"from __future__ import print_function\n",
"from docopt import docopt\n",
"import curses, logging, sqlite3\n",
"import logging\n",
"import os\n",
"import sqlite3\n",
"import time\n",
"import sys\n",
"import signal\n",
"import bs4\n",
"import nonexistendmodule\n",
"import boto as b, peewee as p\n",
"# import django\n",
"import flask.ext.somext # # #\n",
"from sqlalchemy import model"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"try:\n",
" import ujson as json\n",
"except ImportError:\n",
" import json\n",
"\n",
"import models\n",
"\n",
"\n",
"def main():\n",
" pass\n",
"\n",
"import after_method_is_valid_even_if_not_pep8"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.1"
}
},
"nbformat": 4,
"nbformat_minor": 4
}


@@ -1,5 +0,0 @@
import airflow
import numpy
airflow
numpy


@@ -1,3 +0,0 @@
import matplotlib
import pandas
import tensorflow


@@ -1,34 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cd ."
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.4"
}
},
"nbformat": 4,
"nbformat_minor": 4
}

tests/test_pipreqs.py Normal file → Executable file (611 lines changed)

@@ -8,93 +8,35 @@ test_pipreqs
Tests for `pipreqs` module.
"""
from io import StringIO
import logging
from unittest.mock import patch, Mock
import unittest
import os
import requests
import sys
import warnings
from pipreqs import pipreqs
class TestPipreqs(unittest.TestCase):
@classmethod
def setUpClass(cls):
# Disable all logs to avoid spamming the terminal when running tests.
logging.disable(logging.CRITICAL)
# Specific warning not covered by the above command:
warnings.filterwarnings("ignore", category=DeprecationWarning, module="jupyter_client")
cls.modules = [
"flask",
"requests",
"sqlalchemy",
"docopt",
"boto",
"ipython",
"pyflakes",
"nose",
"analytics",
"flask_seasurf",
"peewee",
"ujson",
"nonexistendmodule",
"bs4",
"after_method_is_valid_even_if_not_pep8",
]
cls.modules2 = ["beautifulsoup4"]
cls.local = ["docopt", "requests", "nose", "pyflakes", "ipython"]
cls.project = os.path.join(os.path.dirname(__file__), "_data")
cls.empty_filepath = os.path.join(cls.project, "empty.txt")
cls.imports_filepath = os.path.join(cls.project, "imports.txt")
cls.imports_no_version_filepath = os.path.join(cls.project, "imports_no_version.txt")
cls.imports_any_version_filepath = os.path.join(cls.project, "imports_any_version.txt")
cls.non_existent_filepath = os.path.join(cls.project, "non_existent_file.txt")
cls.parsed_packages = [
{"name": "pandas", "version": "2.0.0"},
{"name": "numpy", "version": "1.2.3"},
{"name": "torch", "version": "4.0.0"},
]
cls.parsed_packages_no_version = [
{"name": "pandas", "version": None},
{"name": "tensorflow", "version": None},
{"name": "torch", "version": None},
]
cls.parsed_packages_any_version = [
{"name": "numpy", "version": None},
{"name": "pandas", "version": "2.0.0"},
{"name": "tensorflow", "version": None},
{"name": "torch", "version": "4.0.0"},
]
cls.project_clean = os.path.join(os.path.dirname(__file__), "_data_clean")
cls.project_invalid = os.path.join(os.path.dirname(__file__), "_invalid_data")
cls.project_with_ignore_directory = os.path.join(os.path.dirname(__file__), "_data_ignore")
cls.project_with_duplicated_deps = os.path.join(os.path.dirname(__file__), "_data_duplicated_deps")
cls.requirements_path = os.path.join(cls.project, "requirements.txt")
cls.alt_requirement_path = os.path.join(cls.project, "requirements2.txt")
cls.non_existing_filepath = "xpto"
cls.project_with_notebooks = os.path.join(os.path.dirname(__file__), "_data_notebook")
cls.project_with_invalid_notebooks = os.path.join(os.path.dirname(__file__), "_invalid_data_notebook")
cls.python_path_same_imports = os.path.join(os.path.dirname(__file__), "_data/test.py")
cls.notebook_path_same_imports = os.path.join(os.path.dirname(__file__), "_data_notebook/test.ipynb")
def setUp(self):
self.modules = ['flask', 'requests', 'sqlalchemy',
'docopt', 'boto', 'ipython', 'pyflakes', 'nose',
'peewee', 'ujson', 'nonexistendmodule', 'bs4', 'after_method_is_valid_even_if_not_pep8' ]
self.modules2 = ['beautifulsoup4']
self.local = ["docopt", "requests", "nose", 'pyflakes']
self.project = os.path.join(os.path.dirname(__file__), "_data")
self.project_invalid = os.path.join(os.path.dirname(__file__), "_invalid_data")
self.project_with_ignore_directory = os.path.join(os.path.dirname(__file__), "_data_ignore")
self.project_with_duplicated_deps = os.path.join(os.path.dirname(__file__), "_data_duplicated_deps")
self.requirements_path = os.path.join(self.project, "requirements.txt")
self.alt_requirement_path = os.path.join(
self.project, "requirements2.txt")
def test_get_all_imports(self):
imports = pipreqs.get_all_imports(self.project)
self.assertEqual(len(imports), 15)
self.assertEqual(len(imports), 13)
for item in imports:
self.assertTrue(item.lower() in self.modules, "Import is missing: " + item)
self.assertTrue(
item.lower() in self.modules, "Import is missing: " + item)
self.assertFalse("time" in imports)
self.assertFalse("logging" in imports)
self.assertFalse("curses" in imports)
@@ -114,119 +56,62 @@
"""
self.assertRaises(SyntaxError, pipreqs.get_all_imports, self.project_invalid)
def test_ignore_errors(self):
"""
Test that invalid python files do not raise an exception when ignore_errors is True.
"""
imports = pipreqs.get_all_imports(self.project_invalid, ignore_errors=True)
self.assertEqual(len(imports), 0)
def test_get_imports_info(self):
"""
Test to see that the right number of packages were found on PyPI
"""
imports = pipreqs.get_all_imports(self.project)
with_info = pipreqs.get_imports_info(imports)
# Should contain 13 items, i.e. everything except "nonexistendmodule" and
# "after_method_is_valid_even_if_not_pep8"
self.assertEqual(len(with_info), 13)
# Should contain 10 items without the "nonexistendmodule" and "after_method_is_valid_even_if_not_pep8"
self.assertEqual(len(with_info), 10)
for item in with_info:
self.assertTrue(
item["name"].lower() in self.modules,
"Import item appears to be missing " + item["name"],
)
def test_get_pkg_names(self):
pkgs = ["jury", "Japan", "camel", "Caroline"]
actual_output = pipreqs.get_pkg_names(pkgs)
expected_output = ["camel", "Caroline", "Japan", "jury"]
self.assertEqual(actual_output, expected_output)
item['name'].lower() in self.modules,
"Import item appears to be missing " + item['name'])
def test_get_use_local_only(self):
"""
Test without checking PyPI; check to see if names of local
imports match what we expect
Test without checking PyPI; check to see if names of local imports match what we expect
- Note even though pyflakes isn't in requirements.txt,
It's added to locals since it is a development dependency
for testing
It's added to locals since it is a development dependency for testing
"""
# should find only the packages listed in self.local
imports_with_info = pipreqs.get_import_local(self.modules)
for item in imports_with_info:
self.assertTrue(item["name"].lower() in self.local)
self.assertTrue(item['name'].lower() in self.local)
def test_init(self):
"""
Test that all modules we will test upon are in requirements file
Test that all modules we will test upon, are in requirements file
"""
pipreqs.init(
{
"<path>": self.project,
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
pipreqs.init({'<path>': self.project, '--savepath': None,
'--use-local': None, '--force': True, '--proxy':None, '--pypi-server':None})
assert os.path.exists(self.requirements_path) == 1
with open(self.requirements_path, "r") as f:
data = f.read().lower()
for item in self.modules[:-3]:
self.assertTrue(item.lower() in data)
# It should be sorted based on names.
data = data.strip().split("\n")
self.assertEqual(data, sorted(data))
def test_init_local_only(self):
"""
Test that items listed in requirements.txt are the same
as the locals expected
Test that items listed in requirements.txt are the same as the locals expected
"""
pipreqs.init(
{
"<path>": self.project,
"--savepath": None,
"--print": False,
"--use-local": True,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
pipreqs.init({'<path>': self.project, '--savepath': None,
'--use-local': True, '--force': True, '--proxy':None, '--pypi-server':None})
assert os.path.exists(self.requirements_path) == 1
with open(self.requirements_path, "r") as f:
data = f.readlines()
for item in data:
item = item.strip().split("==")
item = item.strip().split(" == ")
self.assertTrue(item[0].lower() in self.local)
def test_init_savepath(self):
"""
Test that we can save requirements.txt correctly
to a different path
Test that we can save requirements.txt correctly to a different path
"""
pipreqs.init(
{
"<path>": self.project,
"--savepath": self.alt_requirement_path,
"--use-local": None,
"--proxy": None,
"--pypi-server": None,
"--print": False,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
pipreqs.init({'<path>': self.project, '--savepath':
self.alt_requirement_path, '--use-local': None, '--proxy':None, '--pypi-server':None})
assert os.path.exists(self.alt_requirement_path) == 1
with open(self.alt_requirement_path, "r") as f:
data = f.read().lower()
@@ -237,25 +122,12 @@ class TestPipreqs(unittest.TestCase):
def test_init_overwrite(self):
"""
Test that if requirements.txt exists, it will not be
automatically overwritten
Test that if requirements.txt exists, it will not automatically be overwritten
"""
with open(self.requirements_path, "w") as f:
f.write("should_not_be_overwritten")
pipreqs.init(
{
"<path>": self.project,
"--savepath": None,
"--use-local": None,
"--force": None,
"--proxy": None,
"--pypi-server": None,
"--print": False,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
pipreqs.init({'<path>': self.project, '--savepath': None,
'--use-local': None, '--force': None, '--proxy':None, '--pypi-server':None})
assert os.path.exists(self.requirements_path) == 1
with open(self.requirements_path, "r") as f:
data = f.read().lower()
@@ -263,421 +135,40 @@ class TestPipreqs(unittest.TestCase):
def test_get_import_name_without_alias(self):
"""
Test that function get_name_without_alias()
will work on a string.
- Note: This isn't truly needed when pipreqs is walking
the AST to find imports
Test that function get_name_without_alias() will work on a string.
- Note: This isn't truly needed when pipreqs is walking the AST to find imports
"""
import_name_with_alias = "requests as R"
expected_import_name_without_alias = "requests"
import_name_without_aliases = pipreqs.get_name_without_alias(import_name_with_alias)
self.assertEqual(import_name_without_aliases, expected_import_name_without_alias)
import_name_without_aliases = pipreqs.get_name_without_alias(
import_name_with_alias)
self.assertEqual(
import_name_without_aliases, expected_import_name_without_alias)
def test_custom_pypi_server(self):
"""
Test that trying to use an invalid custom PyPI server fails correctly
"""
self.assertRaises(
requests.exceptions.MissingSchema,
pipreqs.init,
{
"<path>": self.project,
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": "nonexistent",
},
)
self.assertRaises(requests.exceptions.MissingSchema, pipreqs.init, {'<path>': self.project, '--savepath': None,
'--use-local': None, '--force': True, '--proxy': None, '--pypi-server': 'nonexistent'})
def test_ignored_directory(self):
"""
Test --ignore parameter
"""
pipreqs.init(
{
"<path>": self.project_with_ignore_directory,
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--ignore": ".ignored_dir,.ignore_second",
"--diff": None,
"--clean": None,
"--mode": None,
{'<path>': self.project_with_ignore_directory, '--savepath': None,
'--use-local': None, '--force': True,
'--proxy':None,
'--pypi-server':None,
'--ignore':'.ignored_dir,.ignore_second'
}
)
with open(os.path.join(self.project_with_ignore_directory, "requirements.txt"), "r") as f:
data = f.read().lower()
for item in ["click", "getpass"]:
for item in ['click', 'getpass']:
self.assertFalse(item.lower() in data)
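--ignore takes a comma-separated list of directory names to leave out of the scan, so the CLI equivalent of this test is sketched below (placeholder path):

    import subprocess

    # Modules imported only inside the two ignored directories
    # (click and getpass in the fixtures) never reach requirements.txt.
    subprocess.run(
        ["pipreqs", "some/project/path", "--force",
         "--ignore", ".ignored_dir,.ignore_second"],
        check=True,
    )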
def test_dynamic_version_no_pin_scheme(self):
"""
Test --mode=no-pin
"""
pipreqs.init(
{
"<path>": self.project_with_ignore_directory,
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": "no-pin",
}
)
with open(os.path.join(self.project_with_ignore_directory, "requirements.txt"), "r") as f:
data = f.read().lower()
for item in ["beautifulsoup4", "boto"]:
self.assertTrue(item.lower() in data)
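Under --mode=no-pin each requirement is written as a bare package name with no version constraint at all; a one-line sketch of the shape being asserted (package name from the fixtures, shape inferred from the mode's name and the checks above):

    line = "beautifulsoup4"  # no-pin output: name only
    assert "==" not in line and ">=" not in line and "~=" not in line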
def test_dynamic_version_gt_scheme(self):
"""
Test --mode=gt
"""
pipreqs.init(
{
"<path>": self.project_with_ignore_directory,
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": "gt",
}
)
with open(os.path.join(self.project_with_ignore_directory, "requirements.txt"), "r") as f:
data = f.readlines()
for item in data:
symbol = ">="
message = "symbol is not in item"
self.assertIn(symbol, item, message)
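--mode=gt instead writes a lower-bound pin on every line, which is exactly what the per-line assertIn above verifies; the version number in this sketch is invented:

    line = "beautifulsoup4>=4.12.3"  # gt output: any version from this one up
    assert ">=" in line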
def test_dynamic_version_compat_scheme(self):
"""
Test --mode=compat
"""
pipreqs.init(
{
"<path>": self.project_with_ignore_directory,
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": "compat",
}
)
with open(os.path.join(self.project_with_ignore_directory, "requirements.txt"), "r") as f:
data = f.readlines()
for item in data:
symbol = "~="
message = "symbol is not in item"
self.assertIn(symbol, item, message)
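--mode=compat uses the compatible-release operator, which permits upgrades that stay within the pinned release line (again with an invented version):

    line = "beautifulsoup4~=4.12.3"  # ~=4.12.3 is shorthand for >=4.12.3, ==4.12.*
    assert "~=" in line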
def test_clean(self):
"""
Test --clean parameter
"""
pipreqs.init(
{
"<path>": self.project,
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
assert os.path.exists(self.requirements_path) == 1
pipreqs.init(
{
"<path>": self.project,
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": None,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": self.requirements_path,
"--mode": "non-pin",
}
)
with open(self.requirements_path, "r") as f:
data = f.read().lower()
for item in self.modules[:-3]:
self.assertTrue(item.lower() in data)
def test_clean_with_imports_to_clean(self):
"""
Test --clean parameter when there are imports to clean
"""
cleaned_module = "sqlalchemy"
pipreqs.init(
{
"<path>": self.project,
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
assert os.path.exists(self.requirements_path) == 1
pipreqs.init(
{
"<path>": self.project_clean,
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": None,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": self.requirements_path,
"--mode": "non-pin",
}
)
with open(self.requirements_path, "r") as f:
data = f.read().lower()
self.assertTrue(cleaned_module not in data)
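Put differently, --clean rewrites an existing requirements file in place, dropping entries the scanned code no longer imports; a sketch of the two-step flow above with hypothetical paths:

    import subprocess

    # 1. Generate requirements.txt from a project that imports sqlalchemy.
    subprocess.run(["pipreqs", "project/", "--force"], check=True)

    # 2. Re-scan a variant of the project without that import, cleaning the
    #    old file instead of regenerating it: sqlalchemy's line is removed.
    subprocess.run(
        ["pipreqs", "project_clean/", "--clean", "project/requirements.txt"],
        check=True,
    )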
def test_compare_modules(self):
test_cases = [
(self.empty_filepath, [], set()), # both empty
(self.empty_filepath, self.parsed_packages, set()), # only file empty
(
self.imports_filepath,
[],
set(package["name"] for package in self.parsed_packages),
), # only imports empty
(self.imports_filepath, self.parsed_packages, set()), # no difference
(
self.imports_filepath,
self.parsed_packages[1:],
set([self.parsed_packages[0]["name"]]),
), # common case
]
for test_case in test_cases:
with self.subTest(test_case):
filename, imports, expected_modules_not_imported = test_case
modules_not_imported = pipreqs.compare_modules(filename, imports)
self.assertSetEqual(modules_not_imported, expected_modules_not_imported)
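compare_modules answers "what does the requirements file still pin that the code no longer imports?"; a minimal sketch, assuming the same parsed-package dict shape the fixtures use and a hypothetical file pinning docopt and requests:

    from pipreqs import pipreqs

    found = [{"name": "requests", "version": "2.32.0"}]  # what the scan found
    leftover = pipreqs.compare_modules("requirements.txt", found)
    print(leftover)  # expected: {"docopt"}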
def test_output_requirements(self):
"""
Test --print parameter
It should print to stdout the same content as requirements.txt
"""
capturedOutput = StringIO()
sys.stdout = capturedOutput
pipreqs.init(
{
"<path>": self.project,
"--savepath": None,
"--print": True,
"--use-local": None,
"--force": None,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
pipreqs.init(
{
"<path>": self.project,
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
with open(self.requirements_path, "r") as f:
file_content = f.read().lower()
stdout_content = capturedOutput.getvalue().lower()
self.assertTrue(file_content == stdout_content)
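--print writes the generated requirements to stdout rather than to disk, which is why the test needs the second (writing) run before it can compare the two; the CLI equivalent, with a placeholder path:

    import subprocess

    out = subprocess.run(
        ["pipreqs", "some/project/path", "--print"],
        capture_output=True, text=True, check=True,
    ).stdout
    # `out` holds the same content a normal run writes to requirements.txt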
def test_import_notebooks(self):
"""
Test the function get_all_imports() using .ipynb file
"""
self.mock_scan_notebooks()
imports = pipreqs.get_all_imports(self.project_with_notebooks)
for item in imports:
self.assertTrue(item.lower() in self.modules, "Import is missing: " + item)
not_desired_imports = ["time", "logging", "curses", "__future__", "django", "models", "FastAPI", "sklearn"]
for not_desired_import in not_desired_imports:
self.assertFalse(
not_desired_import in imports,
f"{not_desired_import} was imported, but it should not have been."
)
def test_invalid_notebook(self):
"""
Test that invalid notebook files cannot be imported.
"""
self.mock_scan_notebooks()
self.assertRaises(SyntaxError, pipreqs.get_all_imports, self.project_with_invalid_notebooks)
def test_ipynb_2_py(self):
"""
Test the function ipynb_2_py() which converts .ipynb file to .py format
"""
python_imports = pipreqs.get_all_imports(self.python_path_same_imports)
notebook_imports = pipreqs.get_all_imports(self.notebook_path_same_imports)
self.assertEqual(python_imports, notebook_imports)
def test_file_ext_is_allowed(self):
"""
Test the function file_ext_is_allowed()
"""
self.assertTrue(pipreqs.file_ext_is_allowed("main.py", [".py"]))
self.assertTrue(pipreqs.file_ext_is_allowed("main.py", [".py", ".ipynb"]))
self.assertFalse(pipreqs.file_ext_is_allowed("main.py", [".ipynb"]))
def test_parse_requirements(self):
"""
Test parse_requirements function
"""
test_cases = [
(self.empty_filepath, []), # empty file
(self.imports_filepath, self.parsed_packages), # imports with versions
(
self.imports_no_version_filepath,
self.parsed_packages_no_version,
), # imports without versions
(
self.imports_any_version_filepath,
self.parsed_packages_any_version,
), # imports with and without versions
]
for test in test_cases:
with self.subTest(test):
filename, expected_parsed_requirements = test
parsed_requirements = pipreqs.parse_requirements(filename)
self.assertListEqual(parsed_requirements, expected_parsed_requirements)
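parse_requirements turns each line into a name/version dict, with the version left as None when the line is unpinned; a sketch of the shapes implied by the three fixture files (values invented, path hypothetical):

    from pipreqs import pipreqs

    # "docopt==0.6.2"  ->  {"name": "docopt", "version": "0.6.2"}
    # "docopt"         ->  {"name": "docopt", "version": None}
    parsed = pipreqs.parse_requirements("requirements.txt")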
@patch("sys.exit")
def test_parse_requirements_handles_file_not_found(self, exit_mock):
captured_output = StringIO()
sys.stdout = captured_output
# This assertion is needed: since "sys.exit" is mocked, the program will not
# actually exit, so the code after the except block still runs
with self.assertRaises(UnboundLocalError):
pipreqs.parse_requirements(self.non_existing_filepath)
exit_mock.assert_called_once_with(1)
printed_text = captured_output.getvalue().strip()
sys.stdout = sys.__stdout__
self.assertEqual(printed_text, "File xpto was not found. Please, fix it and run again.")
def test_ignore_notebooks(self):
"""
Test if notebooks are ignored when the scan-notebooks parameter is False
"""
notebook_requirement_path = os.path.join(self.project_with_notebooks, "requirements.txt")
pipreqs.init(
{
"<path>": self.project_with_notebooks,
"--savepath": None,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--print": False,
"--diff": None,
"--clean": None,
"--mode": None,
"--scan-notebooks": False,
}
)
assert os.path.exists(notebook_requirement_path) == 1
assert os.path.getsize(notebook_requirement_path) == 1 # file only has a "\n", meaning it's empty
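Notebook scanning is therefore opt-in: with the flag off, .ipynb files contribute nothing and the generated file is effectively empty. The flag name comes from the args dict above; the path is a placeholder:

    import subprocess

    # Include imports found in notebooks as well as .py files.
    subprocess.run(
        ["pipreqs", "some/project/path", "--force", "--scan-notebooks"],
        check=True,
    )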
def test_pipreqs_get_imports_from_pyw_file(self):
pyw_test_dirpath = os.path.join(os.path.dirname(__file__), "_data_pyw")
requirements_path = os.path.join(pyw_test_dirpath, "requirements.txt")
pipreqs.init(
{
"<path>": pyw_test_dirpath,
"--savepath": None,
"--print": False,
"--use-local": None,
"--force": True,
"--proxy": None,
"--pypi-server": None,
"--diff": None,
"--clean": None,
"--mode": None,
}
)
self.assertTrue(os.path.exists(requirements_path))
expected_imports = [
"airflow",
"matplotlib",
"numpy",
"pandas",
"tensorflow",
]
with open(requirements_path, "r") as f:
imports_data = f.read().lower()
for _import in expected_imports:
self.assertTrue(
_import.lower() in imports_data,
f"'{_import}' import was expected but not found.",
)
os.remove(requirements_path)
def mock_scan_notebooks(self):
pipreqs.scan_noteboooks = Mock(return_value=True)
pipreqs.handle_scan_noteboooks()
def tearDown(self):
"""
@@ -693,5 +184,5 @@ class TestPipreqs(unittest.TestCase):
pass
if __name__ == "__main__":
if __name__ == '__main__':
unittest.main()

30
tox.ini
View File

@@ -1,31 +1,9 @@
[tox]
isolated_build = true
envlist = py39, py310, py311, py312, py313, pypy3, flake8
[gh-actions]
python =
3.9: py39
3.10: py310
3.11: py311
3.12: py312
3.13: py313
pypy-3.10: pypy3
envlist = py26, py27, py33, py34
[testenv]
setenv =
PYTHONPATH = {toxinidir}:{toxinidir}/pipreqs
commands =
python -m unittest discover
[testenv:flake8]
deps = flake8
commands = flake8 pipreqs tests
[flake8]
exclude =
tests/_data/
tests/_data_clean/
tests/_data_duplicated_deps/
tests/_data_ignore/
tests/_invalid_data/
max-line-length = 120
commands = python setup.py test
deps =
-r{toxinidir}/requirements.txt