ソースを参照

:construction: add rabbitmq & toml & logger support for ai-translate
worker

Jeremy Zheng 9 ヶ月 前
コミット
ae69bc55e9

+ 1 - 1
ai-translate/README.md

@@ -9,5 +9,5 @@ $ source $HOME/tmp/python3/bin/activate
 > python3 -m pip install -e .
 
 > python3 -m ai_translate -h
-> python3 -m ai_translate -c config.toml -n worker-1 -q ai.translate.us
+> python3 -m ai_translate -d -c config.toml -n worker-us-1 -q ai.translate.us
 ```

+ 34 - 2
ai-translate/ai_translate/__init__.py

@@ -1,4 +1,36 @@
+import logging
+import tomllib
 
+import pika
 
-def launch():
-    print('Hello, Mint!')
+from .worker import handle_message
+
+logger = logging.getLogger(__name__)
+
+
def start_consumer(name, queue, config):
    """Connect to RabbitMQ and consume messages from *queue* until interrupted.

    Args:
        name: human-readable consumer name, used only for logging.
        queue: name of the RabbitMQ queue to consume from.
        config: mapping with 'host', 'port', 'user', 'password' and
            'virtual-host' keys (the [rabbitmq] table of the TOML config).

    Blocks inside ``start_consuming``; the connection is closed on the
    way out (normal stop or exception).
    """
    connection = pika.BlockingConnection(
        pika.ConnectionParameters(
            host=config['host'], port=config['port'],
            credentials=pika.PlainCredentials(
                config['user'], config['password']),
            virtual_host=config['virtual-host']))
    channel = connection.channel()

    def callback(ch, method, properties, body):
        logger.info("received message(%s,%s)",
                    properties.message_id, properties.content_type)
        # With auto_ack=True the broker already discarded the message, so a
        # handler failure must not kill the long-running consumer: log the
        # traceback and keep consuming.
        try:
            handle_message(properties.message_id,
                           properties.content_type, body)
        except Exception:
            logger.exception('failed to handle message(%s)',
                             properties.message_id)

    channel.basic_consume(
        queue=queue, on_message_callback=callback, auto_ack=True)

    logger.info('start a consumer(%s) for queue(%s)', name, queue)
    try:
        channel.start_consuming()
    finally:
        # Release the TCP connection even when consuming stops abnormally.
        connection.close()
+
+
def launch(name, queue, config_file):
    """Load the TOML configuration and run a blocking RabbitMQ consumer.

    Args:
        name: consumer name (logging only).
        queue: queue name to consume from.
        config_file: path to a TOML file containing a [rabbitmq] table.

    Raises:
        OSError: *config_file* cannot be opened.
        tomllib.TOMLDecodeError: the file is not valid TOML.
        KeyError: the [rabbitmq] table is missing.
    """
    logger.debug('load configuration from %s', config_file)
    with open(config_file, "rb") as config_fd:
        config = tomllib.load(config_fd)
    # Start consuming only after the config file is closed: start_consumer
    # blocks for the process lifetime and must not pin the descriptor open.
    start_consumer(name, queue, config['rabbitmq'])

+ 44 - 2
ai-translate/ai_translate/__main__.py

@@ -1,4 +1,46 @@
-from ai_translate import launch
+import logging
+import argparse
+import sys
+import os
+
+from . import launch
+
+logger = logging.getLogger(__name__)
+
+
def main():
    """Parse command-line arguments, configure logging and run the worker.

    Exits via os._exit(0) on Ctrl-C so a blocked pika consumer cannot
    swallow the shutdown.
    """
    parser = argparse.ArgumentParser(
        prog='ai-translate',
        description='An OpenAI consumer process',
        epilog='https://github.com/iapt-platform/mint/tree/master')
    parser.add_argument('-c', '--config', type=str,
                        default='config.toml', help='configuration file')
    parser.add_argument('-q', '--queue', type=str, required=True,
                        help='queue name')
    parser.add_argument('-n', '--name', type=str, required=True,
                        help='consumer name')
    parser.add_argument('-d', '--debug',
                        action='store_true', help='run on debug mode')
    parser.add_argument('-v', '--version',
                        action='version', version='%(prog)s v2025.6.11')
    args = parser.parse_args()

    # Debug mode logs with file path and line number; normal mode keeps the
    # line short. (Only the format differs between the two modes — the level
    # choice is shared, so basicConfig is called exactly once.)
    if args.debug:
        log_format = '%(levelname)-5s %(asctime)s(%(pathname)s %(lineno)d): %(message)s'
    else:
        log_format = '%(levelname)-5s %(asctime)s(%(module)s): %(message)s'
    logging.basicConfig(
        level=logging.DEBUG if args.debug else logging.INFO,
        format=log_format)

    try:
        launch(args.name, args.queue, args.config)
    except KeyboardInterrupt:
        logger.warning('receive interrupted signal, exit...')
        try:
            sys.exit(0)
        except SystemExit:
            # Force-exit even if something catches SystemExit during teardown.
            os._exit(0)
+
 
 if __name__ == '__main__':
-    launch()
+    main()

+ 7 - 0
ai-translate/ai_translate/worker.py

@@ -0,0 +1,7 @@
+import logging
+
+logger = logging.getLogger(__name__)
+
+
def handle_message(id, content_type, body):
    """Process one message consumed from the RabbitMQ queue.

    Stub: the actual translation work is not implemented yet; it only logs.

    Args:
        id: the AMQP message id (``properties.message_id`` from the consumer).
        content_type: the AMQP content type of *body*.
        body: raw message payload — presumably bytes as delivered by pika;
            TODO confirm against the publishers.
    """
    logger.info("TODO")

+ 6 - 0
ai-translate/config.orig.toml

@@ -0,0 +1,6 @@
+[rabbitmq]
+host = '127.0.0.1'
+port = 5672
+user = 'www'
+password = 'change-me'
+virtual-host = 'testing'