te')); return $arr; } /* 遍历用户所有主题 * @param $uid 用户ID * @param int $page 页数 * @param int $pagesize 每页记录条数 * @param bool $desc 排序方式 TRUE降序 FALSE升序 * @param string $key 返回的数组用那一列的值作为 key * @param array $col 查询哪些列 */ function thread_tid_find_by_uid($uid, $page = 1, $pagesize = 1000, $desc = TRUE, $key = 'tid', $col = array()) { if (empty($uid)) return array(); $orderby = TRUE == $desc ? -1 : 1; $arr = thread_tid__find($cond = array('uid' => $uid), array('tid' => $orderby), $page, $pagesize, $key, $col); return $arr; } // 遍历栏目下tid 支持数组 $fid = array(1,2,3) function thread_tid_find_by_fid($fid, $page = 1, $pagesize = 1000, $desc = TRUE) { if (empty($fid)) return array(); $orderby = TRUE == $desc ? -1 : 1; $arr = thread_tid__find($cond = array('fid' => $fid), array('tid' => $orderby), $page, $pagesize, 'tid', array('tid', 'verify_date')); return $arr; } function thread_tid_delete($tid) { if (empty($tid)) return FALSE; $r = thread_tid__delete(array('tid' => $tid)); return $r; } function thread_tid_count() { $n = thread_tid__count(); return $n; } // 统计用户主题数 大数量下严谨使用非主键统计 function thread_uid_count($uid) { $n = thread_tid__count(array('uid' => $uid)); return $n; } // 统计栏目主题数 大数量下严谨使用非主键统计 function thread_fid_count($fid) { $n = thread_tid__count(array('fid' => $fid)); return $n; } ?>questdb Python library "questdb.ingress.Sender' has no attribute 'from_conf'" using do
最新消息:雨落星辰是一个专注网站SEO优化、网站SEO诊断、搜索引擎研究、网络营销推广、网站策划运营及站长类的自媒体原创博客

questdb Python library "questdb.ingress.Sender' has no attribute 'from_conf'" using do

programmeradmin4浏览0评论

I have a simple setup to get telemetry off an energy meter and graph it.

Meter > MQTT > RabbitMQ > Python listener > QuestDB > Grafana.

I got it all working on a single EC2 instance running as services. I then rebuilt in a new instance using Docker - separate container for RabbitMQ, QuestDB, python script and Grafana.

I can get the script to run from inside its own container, consuming from rabbitmq in another.

But it won't write to the QuestDB database in another container.

I can ping from my "listener" container to "rabbitmq" and "questdb" containers (from inside "listener"), the python library for RabbitMQ works fine (pika) but now the questdb "Sender" call I make that used to work just fine when the python was running from the host to the containers now won't work - gives

AttributeError: type object 'questdb.ingress.Sender' has no attribute 'from_conf'

And it’s not giving a connection failure error, it’s just puking instantly.

I can successfully write to the questdb db using that same python script from a different EC2 instance.

It seems like the Sender func does not like talking from container to container inside the same host.

Code:

with Sender.from_conf(conf) as sender:
            sender.row(
                'Sensu',
                columns={'DeviceID': DeviceID, 'Voltage_rms': payload[0], 'Current_rms': payload[1], 'Power_real':payload[2], 'Import_energy': payload[3], 'Export_energy': payload[4], 'Frequency': payload[5], 'PowerFactor': payload[6], 'ListenerTimestamp': ListenerTimestamp, "MessageCount": MessageCount},
                at=TimestampNanos.now())

Using conf = f'http::addr=localhost:9000;' — I have tried every permutation of this: localhost, the container network name, the internal IP address, with and without auth.

Docker-compose.yaml:

name: sensu
services:
   rabbitmq:
      image: rabbitmq:3-management
      container_name: rabbitmq
      hostname: rabbitmq-server
      restart: unless-stopped
      ports:
        - 5672:5672
        - 15672:15672
        - 1883:1883
      environment:
        - COMPOSE_PROJECT_NAME=sensu
        - RABBITMQ_DEFAULT_PASS=sensupass
        - RABBITMQ_DEFAULT_USER=sensu-mqtt
      command: "/bin/bash -c \"rabbitmq-plugins enable --offline rabbitmq_mqtt; rabbitmq-server\""
      volumes:
        - ~/.docker-conf/rabbitmq/data/:/var/lib/rabbitmq/
        - ~/.docker-conf/rabbitmq/log/:/var/log/rabbitmq
      networks:
        - sensu_net

   questdb:
      image: questdb/questdb
      container_name: questdb
      hostname: questdb-server
      restart: unless-stopped
      ports:
        - 9000:9000
      environment:
        - COMPOSE_PROJECT_NAME=sensu
      volumes:
        - ~/questdb_data/questdb:/root/.questdb/db
      networks:
        - sensu_net

networks:
  sensu_net:
    name: sensu_net
    driver: bridge

I have a simple setup to get telemetry off an energy meter and graph it.

Meter > MQTT > RabbitMQ > Python listener > QuestDB > Grafana.

I got it all working on a single EC2 instance running as services. I then rebuilt in a new instance using Docker - separate container for RabbitMQ, QuestDB, python script and Grafana.

I can get the script to run from inside its own container, consuming from rabbitmq in another.

But it won't write to the QuestDB database in another container.

I can ping from my "listener" container to "rabbitmq" and "questdb" containers (from inside "listener"), the python library for RabbitMQ works fine (pika) but now the questdb "Sender" call I make that used to work just fine when the python was running from the host to the containers now won't work - gives

AttributeError: type object 'questdb.ingress.Sender' has no attribute 'from_conf'

And it’s not giving a connection failure error, it’s just puking instantly.

I can successfully write to the questdb db using that same python script from a different EC2 instance.

It seems like the Sender func does not like talking from container to container inside the same host.

Code:

with Sender.from_conf(conf) as sender:
            sender.row(
                'Sensu',
                columns={'DeviceID': DeviceID, 'Voltage_rms': payload[0], 'Current_rms': payload[1], 'Power_real':payload[2], 'Import_energy': payload[3], 'Export_energy': payload[4], 'Frequency': payload[5], 'PowerFactor': payload[6], 'ListenerTimestamp': ListenerTimestamp, "MessageCount": MessageCount},
                at=TimestampNanos.now())

Using conf = f'http::addr=localhost:9000;' — I have tried every permutation of this: localhost, the container network name, the internal IP address, with and without auth.

Docker-compose.yaml:

name: sensu
services:
   rabbitmq:
      image: rabbitmq:3-management
      container_name: rabbitmq
      hostname: rabbitmq-server
      restart: unless-stopped
      ports:
        - 5672:5672
        - 15672:15672
        - 1883:1883
      environment:
        - COMPOSE_PROJECT_NAME=sensu
        - RABBITMQ_DEFAULT_PASS=sensupass
        - RABBITMQ_DEFAULT_USER=sensu-mqtt
      command: "/bin/bash -c \"rabbitmq-plugins enable --offline rabbitmq_mqtt; rabbitmq-server\""
      volumes:
        - ~/.docker-conf/rabbitmq/data/:/var/lib/rabbitmq/
        - ~/.docker-conf/rabbitmq/log/:/var/log/rabbitmq
      networks:
        - sensu_net

   questdb:
      image: questdb/questdb
      container_name: questdb
      hostname: questdb-server
      restart: unless-stopped
      ports:
        - 9000:9000
      environment:
        - COMPOSE_PROJECT_NAME=sensu
      volumes:
        - ~/questdb_data/questdb:/root/.questdb/db
      networks:
        - sensu_net

networks:
  sensu_net:
    name: sensu_net
    driver: bridge
Share Improve this question edited Feb 17 at 15:23 dthorbur 1,0693 gold badges13 silver badges26 bronze badges asked Feb 17 at 14:18 d0djad0dja 14 bronze badges 7
  • 1 Which version of the python client are you using? from_conf was added about a year ago in version 2.0.0 py-questdb-client.readthedocs.io/en/latest/changelog.html#id7 – Javier Ramirez Commented Feb 17 at 14:54
  • Should be latest... installing using "pip install -U questdb". – d0dja Commented Feb 17 at 14:58
  • 1 Could you please verify with pip list inside the container running the client? – Javier Ramirez Commented Feb 17 at 15:39
  • WIll do. FWIW this is the Dockerfile (using compose for questdb and rabbit as they're done; still working on the listener. – d0dja Commented Feb 17 at 15:50
  • Dockerfile: FROM python:3.7-alpine COPY . / RUN pip3 install pika questdb WORKDIR /listener pip install --upgrade pip COPY telemetry_insert.py /listener CMD ["python3", "telemetry_insert.py"] – d0dja Commented Feb 17 at 15:51
 |  Show 2 more comments

1 Answer 1

Reset to default 0

From your comment, this is a minimal Dockerfile you are using:

FROM python:3.7-alpine
COPY . /
RUN pip3 install -U pika
RUN pip3 install -U questdb

Building this, creating a test container, and checking the package version for questdb:

❯ docker run --rm -it --entrypoint /bin/sh test

/ # pip list
Package    Version
---------- -------
pika       1.3.2
pip        23.0.1
questdb    1.1.0
setuptools 57.5.0
wheel      0.41.2

Since from_conf was introduced in questdb==2.0.0, this is the cause of your error. To fix this, you will need to install a newer version of questdb. To do this, change your Dockerfile to a more recent Python version. For example:

FROM python:3.12-alpine

This gives you a more recent questdb version:

❯ docker run --rm -it --entrypoint /bin/sh test

/ # pip list
Package Version
------- -------
pika    1.3.2
pip     24.3.1
questdb 2.0.3

For future reference, navigate to the PyPI package page and scroll down to find the required Python version (Requires: Python >=X.Y) for your package: https://pypi.org/project/questdb/

  • Credit to @Javier Ramirez for finding the package release notes for v2.0.0

与本文相关的文章

发布评论

评论列表(0)

  1. 暂无评论