
python - How to create a system-wide environment to install pip packages when using pip inside a Docker container? - Stack Overflow


I am trying to build an existing Dockerfile, which needs to be migrated from an older Ubuntu base image to a more up-to-date base image.

The existing Dockerfile contains commands such as

pip install pandas

There are many such pip commands, each of which triggers the following error message.

error: externally-managed-environment

This is not unexpected. Recent versions of Ubuntu produce this error when the user attempts to install pip packages without first activating a virtual environment.

This can be fixed by creating and activating a virtual environment. The disadvantage is that inside a Docker container this shouldn't really be needed, since a container is its own isolated environment. In addition, it adds an extra layer of indirection which is slightly inconvenient: RUN python3 my_file.py no longer works directly, as the venv has to be activated first. (There are two ways around this, the easiest of which is to run RUN /path/to/.venv/bin/python3 /path/to/my_file.py.)
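
For context, a minimal sketch of this workaround (the paths /opt/venv and my_file.py are illustrative, not from the original Dockerfile):

FROM ubuntu:24.04
RUN apt-get update && apt-get install -y --no-install-recommends python3 python3-venv && rm -rf /var/lib/apt/lists/*
# Create the venv once; no "activation" is ever needed in a Dockerfile.
RUN python3 -m venv /opt/venv
# Install into the venv by calling its pip directly.
RUN /opt/venv/bin/pip install pandas
# Run scripts with the venv's interpreter explicitly.
RUN /opt/venv/bin/python3 /path/to/my_file.py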

The error could also be "fixed" by passing the --break-system-packages argument. I do not know in detail what the consequences of this are, so I do not know if this could be a recommended solution in this context.
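
If it helps, the flag can be passed per command, or set once for the whole image via pip's environment-variable form of the option (PIP_BREAK_SYSTEM_PACKAGES, supported by recent pip versions; worth verifying against the pip shipped in your base image):

# Per command:
RUN pip install --break-system-packages pandas
# Or once, so every later pip call inherits it:
ENV PIP_BREAK_SYSTEM_PACKAGES=1
RUN pip install pandas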

There is a third possibility, which would be to install python3-pandas (assuming it exists). This is an apt package which provides an installation of pandas via apt. I would prefer not to use this method, since not all pip packages are available as apt packages, and I want to avoid a fragmented install whereby some packages are provided through one method and others through a different method.
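
For completeness, that apt-based route would look something like the following (python3-pandas follows Debian/Ubuntu's python3-&lt;name&gt; naming convention, but not every PyPI package has such a counterpart):

RUN apt-get update && \
    apt-get install -y --no-install-recommends python3-pandas && \
    rm -rf /var/lib/apt/lists/*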

To review:

  • What does the --break-system-packages command line option do? How "safe" is it inside a Docker container? (Rather than being frequently created and destroyed, this particular container tends to persist for a significant period of time, typically several weeks to a few months.)
  • If that isn't a suitable or recommended approach, is there a way that I can conveniently create a system-wide virtual environment and somehow cause it to be "permanently" activated? (In other words, to create some kind of "transparent" virtual environment which isn't noticeable to the user, so that running python3 main.py will run main.py with the virtual environment active, automatically. Can this be done?)

asked Feb 17 at 15:33 by user2138149

  • Can you edit the question to include a minimal reproducible example? The simplest Python-in-Docker setups don't do anything at all to "create an environment", they just use the "system" pip. Something like RUN pip install . will install the current project, its dependencies as declared in pyproject.toml, and any Python entry point scripts. – David Maze, Feb 17 at 16:52

1 Answer


What does the --break-system-packages command line option do?

In short, it tells pip to install packages under /usr/, a location that is otherwise managed by apt-get. The danger is that pip and apt-get can then overwrite or break each other's files; that is exactly what the error exists to prevent. Inside a container this is less risky than on a host system, but it is not risk-free if the image also installs Python packages via apt-get.
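
Mechanically, pip refuses because of a PEP 668 marker file named EXTERNALLY-MANAGED that Debian/Ubuntu ship in the standard library directory; --break-system-packages tells pip to ignore it. In a disposable image, another common (equally unsupported) trick is to delete the marker, which has the same effect and the same risks; the python3.x directory name varies by release, hence the glob:

RUN rm -f /usr/lib/python3.*/EXTERNALLY-MANAGED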

If this isn't a suitable or recommended approach, is there a way that I can conveniently create a system-wide virtual environment, and somehow cause it to be "permanently" activated.

Something along the lines of:

RUN python3 -m venv /venv
ENV VIRTUAL_ENV=/venv
ENV PATH=/venv/bin:$PATH
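
With /venv/bin first on PATH, plain pip and python3 now resolve to the venv, so the existing RUN lines can stay as they are:

# These now use /venv/bin/pip and /venv/bin/python3, not the system copies.
RUN pip install pandas
RUN python3 my_file.py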

But this could fall short for packages that need variables beyond PATH (for example LD_LIBRARY_PATH), since the ENV lines only replicate part of what the activate script does; to be safe, make sure the venv is activated in every shell. For example, the following links the venv's activate script into /etc/profile.d/ and uses login shells, which source the files in that directory on startup (see https://www.gnu.org/software/bash/manual/html_node/Bash-Startup-Files.html):

RUN python3 -m venv /venv
RUN ln -s /venv/bin/activate /etc/profile.d/myactivate.sh
SHELL ["bash", "-lc"]
ENTRYPOINT ["bash", "-l"]

Use pip. If you are using Python, consider just using the official python Docker image, where pip installs work without this error.

The recommended practice is to create a virtual environment and avoid conflicting with system package managers like apt-get, even inside Docker.
