diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml
index 04fa51a4..5b00a918 100644
--- a/.github/workflows/docker-image.yml
+++ b/.github/workflows/docker-image.yml
@@ -1,57 +1,62 @@
-name: Docker Image CI
-
-on:
-  push:
-    branches: [ "main" ]
-    paths:
-      - '**'
-      - 'website/**'
-  workflow_dispatch:
-
-jobs:
-  build-main-app:
-    runs-on: ubuntu-latest
-    if: ${{ github.event_name == 'workflow_dispatch' || (github.event_name == 'push' && !contains(github.event.head_commit.modified, 'website')) }}
-    steps:
-      - name: Check out the repo
-        uses: actions/checkout@v2
-      - name: Log in to Docker Hub
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKER_HUB_ACCOUNT }}
-          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
-      - name: Build and push main app Docker image
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          file: deploy/Dockerfile
-          push: true
-          tags: connectai/gitmaya:latest
-
-  build-website:
-    runs-on: ubuntu-latest
-    if: ${{ github.event_name == 'workflow_dispatch' || contains(github.event.head_commit.modified, 'website') }}
-    steps:
-      - name: Check out the repo
-        uses: actions/checkout@v2
-        with:
-          submodules: 'true'
-      - name: Log in to Docker Hub
-        uses: docker/login-action@v1
-        with:
-          username: ${{ secrets.DOCKER_HUB_ACCOUNT }}
-          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
-      - name: Build and push website Docker image
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          file: deploy/Dockerfile.proxy
-          push: true
-          tags: connectai/gitmaya-proxy:latest
-      - name: Build and push website Docker image-SaaS
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          file: deploy/Dockerfile.proxy.saas
-          push: true
-          tags: connectai/gitmaya-proxy:saas
+name: Docker Image CI
+
+on:
+  push:
+    branches: [ "main" ]
+    paths:
+      - '**'
+      - 'website/**'
+  workflow_dispatch:
+
+jobs:
+  build-main-app:
+    runs-on: ubuntu-latest
+    if: ${{ github.event_name == 'workflow_dispatch' || (github.event_name == 'push' && !contains(github.event.head_commit.modified, 'website')) }}
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v2
+
+      - name: Log in to Docker Hub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_HUB_ACCOUNT }}
+          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+
+      - name: Build and push main app Docker image
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: deploy/Dockerfile
+          push: true
+          tags: connectai/gitmaya:latest
+
+  build-website:
+    runs-on: ubuntu-latest
+    if: ${{ github.event_name == 'workflow_dispatch' || contains(github.event.head_commit.modified, 'website') }}
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v2
+        with:
+          submodules: 'true'
+
+      - name: Log in to Docker Hub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_HUB_ACCOUNT }}
+          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+
+      - name: Build and push website Docker image
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: deploy/Dockerfile.proxy
+          push: true
+          tags: connectai/gitmaya-proxy:latest
+
+      - name: Build and push website Docker image-SaaS
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: deploy/Dockerfile.proxy.saas
+          push: true
+          tags: connectai/gitmaya-proxy:saas
diff --git a/deploy/Dockerfile.proxy.saas b/deploy/Dockerfile.proxy.saas
index a149a8a3..ce46c91f 100644
--- a/deploy/Dockerfile.proxy.saas
+++ b/deploy/Dockerfile.proxy.saas
@@ -8,8 +8,7 @@ RUN corepack enable
 COPY ./website /app
 WORKDIR /app
 
-RUN pnpm install --frozen-lockfile
-RUN pnpm build:saas
+RUN pnpm install --frozen-lockfile && pnpm build:saas
 
 FROM jwilder/nginx-proxy:alpine
 
diff --git a/server/tasks/lark/issue.py b/server/tasks/lark/issue.py
index 789e8a39..03deb7a5 100644
--- a/server/tasks/lark/issue.py
+++ b/server/tasks/lark/issue.py
@@ -312,10 +312,10 @@ def send_issue_comment(issue_id, comment, user_name: str):
     )
     if chat_group and issue.message_id:
         bot, _ = get_bot_by_application_id(chat_group.im_application_id)
-        # Replace image URLs in the comment with image_keys
        comment = replace_images_with_keys(comment, bot)
-        content = gen_comment_message(user_name, comment)
+        # Always reply with a rich-text post message, so images and @mentions are supported
+        content = gen_comment_post_message(user_name, comment)
         result = bot.reply(
             issue.message_id,
             FeishuPostMessage(*content),
         )
@@ -324,27 +324,73 @@
     return False
 
 
-def gen_comment_message(user_name, comment):
+def gen_comment_post_message(user_name, comment):
     comment = comment.replace("\r\n", "\n")
     comment = re.sub(r"!\[.*?\]\((.*?)\)", r"\n\1\n", comment)
 
-    pattern = r"img_v\d{1,}_\w{4}_[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}"
-    messages = []
-    messages.append([FeishuPostMessageText(f"@{user_name}: ")])
+    img_key_pattern = r"img_v\d{1,}_\w{4}_[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}"
+    messages = [[FeishuPostMessageText(f"@{user_name}: ")]]
 
     # Split by newlines
-    elements = re.split("\n", comment)
-    for element in elements:
-        if not element or element == "":
+    lines = re.split("\n", comment)
+    for line in lines:
+        if not line or line == "":
             continue
-        if re.match(pattern, element):
-            messages.append([FeishuPostMessageImage(image_key=element)])
-        else:  # handle the plain-text part
-            messages.append([FeishuPostMessageText(text=element)])
+        if re.match(img_key_pattern, line):
+            messages.append([FeishuPostMessageImage(image_key=line)])
+        else:
+            # Handle @mentions and plain text within each line
+            elements = line.split(" ")
+            element_messages = []
+            for element in elements:
+                if element.startswith("@"):
+                    user_id = get_openid_by_code_name(element[1:])
+                    element_messages.append(
+                        FeishuPostMessageAt(user_id=user_id)
+                        if user_id
+                        else FeishuPostMessageText(text=element)
+                    )
+                else:
+                    element_messages.append(FeishuPostMessageText(text=element))
+
+            messages.append(element_messages)
 
     return messages
 
 
+def get_openid_by_code_name(code_name):
+    code_user_id = (
+        db.session.query(CodeUser.id)
+        .filter(
+            CodeUser.name == code_name,
+        )
+        .limit(1)
+        .scalar()
+    )
+    if not code_user_id:
+        logging.info(f"get_openid_by_code_name---code_user_id: Not found")
+        return None
+
+    openid = (
+        db.session.query(IMUser.openid)
+        .join(
+            TeamMember,
+            TeamMember.im_user_id == IMUser.id,
+        )
+        .filter(
+            TeamMember.code_user_id == code_user_id,
+        )
+        .limit(1)
+        .scalar()
+    )
+
+    if not openid:
+        logging.info(f"get_openid_by_code_name---openid: Not found")
+        return None
+
+    return openid
+
+
 @celery.task()
 def update_issue_card(issue_id: str):
     """Update issue card message.
@@ -456,8 +502,35 @@ def create_issue_comment(app_id, message_id, content, data, *args, **kwargs):
     github_app, team, repo, issue, _, _ = _get_github_app(
         app_id, message_id, content, data, *args, **kwargs
     )
+    comment_text = content["text"]
+
+    # Check whether the content contains any @mentions
+    if "mentions" in data["event"]["message"]:
+        # Collect the open_id list from the mentions
+        mentions = data["event"]["message"]["mentions"]
+        openid_list = [mention["id"]["open_id"] for mention in mentions]
+        code_name_list = []
+
+        for openid in openid_list:
+            # Resolve each openid to its GitHub name (code_name)
+            code_name_list.append(
+                get_github_name_by_openid(
+                    openid,
+                    team.id,
+                    app_id,
+                    message_id,
+                    content,
+                    data,
+                    *args,
+                    **kwargs,
+                )
+            )
+
+        # Replace the IM names in the content with GitHub names
+        comment_text = replace_im_name_to_github_name(content["text"], code_name_list)
+
     response = github_app.create_issue_comment(
-        team.name, repo.name, issue.issue_number, content["text"]
+        team.name, repo.name, issue.issue_number, comment_text
     )
     if "id" not in response:
         return send_issue_failed_tip(
@@ -466,6 +539,89 @@
     return response
 
 
+def replace_im_name_to_github_name(content, code_name_list):
+    """
+    Replace IM names with GitHub names.
+
+    Args:
+        content (str): content
+        code_name_list (list): code name list
+
+    Returns:
+        str: replaced content
+    """
+
+    # Replacement callback for re.sub
+    def replace_user(match):
+        index = int(match.group(1)) - 1  # extract the user number and convert it to a list index
+        return (
+            f"@{code_name_list[index]}"
+            if 0 <= index < len(code_name_list)
+            else match.group(0)
+        )
+
+    return re.sub(r"@_user_(\d+)", replace_user, content)
+
+
+def get_github_name_by_openid(
+    openid, team_id, app_id, message_id, content, data, *args, **kwargs
+):
+    """
+    Get the GitHub name by openid.
+
+    Args:
+        openid (str): openid
+        team_id (str): team_id
+        app_id (str): app_id
+        message_id (str): message_id
+        content (str): content
+        data (dict): data
+
+    Returns:
+        str: GitHub name
+    """
+    # Step 1: query the team_member table by openid and team_id to get im_user_id
+    im_user_id = (
+        db.session.query(TeamMember.im_user_id)
+        .join(
+            IMUser,  # the BindUser table is aliased as CodeUser and IMUser
+            IMUser.id == TeamMember.im_user_id,
+        )
+        .filter(
+            IMUser.openid == openid,
+            TeamMember.team_id == team_id,
+        )
+        .limit(1)
+        .scalar()
+    )
+
+    if not im_user_id:
+        return send_issue_failed_tip(
+            "找不到对应的飞书用户", app_id, message_id, content, data, *args, **kwargs
+        )
+
+    # Step 2: query the team_member table again with im_user_id and team_id to get code_user_id
+    code_user_id = (
+        db.session.query(TeamMember.code_user_id)
+        .filter(
+            TeamMember.im_user_id == im_user_id,
+            TeamMember.team_id == team_id,
+        )
+        .limit(1)
+        .scalar()
+    )
+
+    if not code_user_id:
+        return send_issue_failed_tip(
+            "找不到对应的 GitHub 用户", app_id, message_id, content, data, *args, **kwargs
+        )
+
+    # Step 3: with code_user_id, look up the name in the bind_user table
+    name = db.session.query(CodeUser.name).filter(CodeUser.id == code_user_id).scalar()
+
+    return name
+
+
 @celery.task()
 @with_authenticated_github()
 def close_issue(app_id, message_id, content, data, *args, **kwargs):
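
The issue.py changes above translate mentions in both directions: gen_comment_post_message turns a GitHub comment into Feishu post segments (text, @-mention, image), and replace_im_name_to_github_name maps the @_user_N placeholders that Lark puts into message text back onto GitHub logins. The following is a minimal, self-contained sketch of those two transformations, not part of the patch: the tuple-returning helpers stand in for the FeishuPostMessageText/At/Image classes, a plain dict replaces the database lookup in get_openid_by_code_name, and the markdown image in the demo already contains an image_key, which in the real flow replace_images_with_keys produces first.

import re

# Stand-ins for the FeishuPostMessageText / FeishuPostMessageAt / FeishuPostMessageImage
# classes used in the diff; plain tuples are enough to show the structure.
def text_segment(text):
    return ("text", text)


def at_segment(open_id):
    return ("at", open_id)


def image_segment(image_key):
    return ("img", image_key)


# Hypothetical lookup table standing in for get_openid_by_code_name's database query.
OPENID_BY_GITHUB_NAME = {"octocat": "ou_fake_open_id"}

# Same image_key pattern as in gen_comment_post_message.
IMG_KEY_PATTERN = (
    r"img_v\d{1,}_\w{4}_[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}"
)


def split_comment(user_name, comment):
    """Mimic gen_comment_post_message: markdown images get their own line, image_key
    lines become image segments, and @name tokens become at segments when they resolve."""
    comment = comment.replace("\r\n", "\n")
    comment = re.sub(r"!\[.*?\]\((.*?)\)", r"\n\1\n", comment)
    messages = [[text_segment(f"@{user_name}: ")]]
    for line in comment.split("\n"):
        if not line:
            continue
        if re.match(IMG_KEY_PATTERN, line):
            messages.append([image_segment(line)])
            continue
        segments = []
        for element in line.split(" "):
            open_id = OPENID_BY_GITHUB_NAME.get(element[1:]) if element.startswith("@") else None
            segments.append(at_segment(open_id) if open_id else text_segment(element))
        messages.append(segments)
    return messages


def replace_placeholders(content, code_name_list):
    """Same substitution as replace_im_name_to_github_name in the diff."""
    def replace_user(match):
        index = int(match.group(1)) - 1
        if 0 <= index < len(code_name_list):
            return f"@{code_name_list[index]}"
        return match.group(0)  # out of range: leave the placeholder untouched

    return re.sub(r"@_user_(\d+)", replace_user, content)


if __name__ == "__main__":
    print(replace_placeholders("please review @_user_1 and @_user_3", ["octocat", "hubot"]))
    # -> please review @octocat and @_user_3
    print(split_comment("octocat", "LGTM @octocat\n![shot](img_v2_abcd_aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee)"))
    # -> [[('text', '@octocat: ')], [('text', 'LGTM'), ('at', 'ou_fake_open_id')],
    #     [('img', 'img_v2_abcd_aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee')]]
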
diff --git a/server/utils/lark/parser.py b/server/utils/lark/parser.py
index 8f5a99b7..ce6c1cc2 100644
--- a/server/utils/lark/parser.py
+++ b/server/utils/lark/parser.py
@@ -127,11 +127,6 @@ def init_subparsers(self):
         parser_reopen = self.subparsers.add_parser("/reopen")
         parser_reopen.set_defaults(func=self.on_reopen)
 
-        # TODO what we actually receive here is @_user_1; check whether it is the current bot
-        parser_at_bot = self.subparsers.add_parser("at_user_1")
-        parser_at_bot.add_argument("command", nargs="*")
-        parser_at_bot.set_defaults(func=self.on_at_bot)
-
     def _get_topic_by_args(self, *args):
         # Determine whether we are in an issue/pr/repo topic
         chat_type, topic = "", ""
@@ -510,27 +505,6 @@ def on_reopen(self, param, unkown, *args, **kwargs):
             tasks.reopen_pull_request.delay(*args, **kwargs)
         return "reopen", param, unkown
 
-    def on_at_bot(self, param, unkown, *args, **kwargs):
-        logging.info("on_at_user_1 %r %r", vars(param), unkown)
-
-        raw_message = args[3]
-        user_id = raw_message["event"]["message"]["mentions"][0]["id"]["user_id"]
-        user_key = raw_message["event"]["message"]["mentions"][0]["key"]
-        logging.info(f"user_id: {user_id}")
-        logging.info(f"user_key: {user_key}")
-        # command = param.command
-        content = args[2].split(" ", 1)
-
-        # Check that the mention is the bot itself
-        if user_key == "@_user_1" and user_id is None:
-            command = content[1] if len(content) > 1 else None
-            # If the command following @bot is valid, execute it
-            if command:
-                self.parse_args(command, *args, **kwargs)
-            return self.on_help(param, unkown, *args, **kwargs)
-
-        return "on_at_bot", param, unkown
-
     def parse_args(self, command, *args, **kwargs):
         try:
             # edit may span multiple lines, and the first line may not contain a space
diff --git a/server/utils/redis.py b/server/utils/redis.py
index ceda407d..41a93087 100644
--- a/server/utils/redis.py
+++ b/server/utils/redis.py
@@ -1,110 +1,110 @@
-import asyncio
-import functools
-import logging
-import pickle
-import random
-from inspect import iscoroutinefunction
-
-import redis
-from app import app
-
-app.config.setdefault("REDIS_URL", "redis://redis:6379/0")
-
-client = redis.from_url(app.config["REDIS_URL"], decode_responses=True)
-
-
-class RedisStorage(object):
-    def __init__(self, **kwargs):
-        for k, v in kwargs.items():
-            if v:
-                self.set(k, v)
-
-    def get(self, name):
-        return client.get(name)
-
-    def set(self, name, value):
-        client.set(name, value)
-
-
-def get_client(decode_responses=False):
-    return redis.from_url(app.config["REDIS_URL"], decode_responses=decode_responses)
-
-
-def gen_prefix(obj, method):
-    return ".".join([obj.__module__, obj.__class__.__name__, method.__name__])
-
-
-def stalecache(
-    key=None,
-    expire=600,
-    stale=3600,
-    time_lock=1,
-    time_delay=1,
-    max_time_delay=10,
-):
-    def decorate(method):
-        @functools.wraps(method)
-        def wrapper(*args, **kwargs):
-            if kwargs.get("skip_cache"):
-                return method(*args, **kwargs)
-            name = args[0] if args and not key else None
-
-            res = get_client(False).pipeline().ttl(name).get(name).execute()
-            v = pickle.loads(res[1]) if res[0] > 0 and res[1] else None
-            if res[0] <= 0 or res[0] < stale:
-
-                def func():
-                    value = method(*args, **kwargs)
-                    logging.debug("update cache: %s", name)
-                    get_client(False).pipeline().set(name, pickle.dumps(value)).expire(
-                        name, expire + stale
-                    ).execute()
-                    return value
-
-                # create new cache in blocking modal, if cache not exists.
-                if res[0] <= 0:
-                    return func()
-
-                # create new cache in non blocking modal, and return stale data.
-                # set expire to get a "lock", and delay to run the task
-                real_time_delay = random.randrange(time_delay, max_time_delay)
-                get_client(False).expire(name, stale + real_time_delay + time_lock)
-                # create an asyncio task to run func
-                asyncio.create_task(asyncio.sleep(real_time_delay, func()))
-
-            return v
-
-        @functools.wraps(method)
-        async def async_wrapper(*args, **kwargs):
-            if kwargs.get("skip_cache"):
-                return await method(*args, **kwargs)
-
-            name = args[0] if args and not key else None
-
-            res = get_client(False).pipeline().ttl(name).get(name).execute()
-            v = pickle.loads(res[1]) if res[0] > 0 and res[1] else None
-            if res[0] <= 0 or res[0] < stale:
-
-                async def func():
-                    value = await method(*args, **kwargs)
-                    logging.debug("update cache: %s", name)
-                    get_client(False).pipeline().set(name, pickle.dumps(value)).expire(
-                        name, expire + stale
-                    ).execute()
-                    return value
-
-                # create new cache in blocking modal, if cache not exists.
-                if res[0] <= 0:
-                    return await func()
-
-                # create new cache in non blocking modal, and return stale data.
-                # set expire to get a "lock", and delay to run the task
-                real_time_delay = random.randrange(time_delay, max_time_delay)
-                get_client(False).expire(name, stale + real_time_delay + time_lock)
-                asyncio.create_task(asyncio.sleep(real_time_delay, func()))
-
-            return v
-
-        return async_wrapper if iscoroutinefunction(method) else wrapper
-
-    return decorate
+import asyncio
+import functools
+import logging
+import pickle
+import random
+from inspect import iscoroutinefunction
+
+import redis
+from app import app
+
+app.config.setdefault("REDIS_URL", "redis://redis:6379/0")
+
+client = redis.from_url(app.config["REDIS_URL"], decode_responses=True)
+
+
+class RedisStorage(object):
+    def __init__(self, **kwargs):
+        for k, v in kwargs.items():
+            if v:
+                self.set(k, v)
+
+    def get(self, name):
+        return client.get(name)
+
+    def set(self, name, value):
+        client.set(name, value)
+
+
+def get_client(decode_responses=False):
+    return redis.from_url(app.config["REDIS_URL"], decode_responses=decode_responses)
+
+
+def gen_prefix(obj, method):
+    return ".".join([obj.__module__, obj.__class__.__name__, method.__name__])
+
+
+def stalecache(
+    key=None,
+    expire=600,
+    stale=3600,
+    time_lock=1,
+    time_delay=1,
+    max_time_delay=10,
+):
+    def decorate(method):
+        @functools.wraps(method)
+        def wrapper(*args, **kwargs):
+            if kwargs.get("skip_cache"):
+                return method(*args, **kwargs)
+            name = args[0] if args and not key else None
+
+            res = get_client(False).pipeline().ttl(name).get(name).execute()
+            v = pickle.loads(res[1]) if res[0] > 0 and res[1] else None
+            if res[0] <= 0 or res[0] < stale:
+
+                def func():
+                    value = method(*args, **kwargs)
+                    logging.debug("update cache: %s", name)
+                    get_client(False).pipeline().set(name, pickle.dumps(value)).expire(
+                        name, expire + stale
+                    ).execute()
+                    return value
+
+                # create new cache in blocking modal, if cache not exists.
+                if res[0] <= 0:
+                    return func()
+
+                # create new cache in non blocking modal, and return stale data.
+                # set expire to get a "lock", and delay to run the task
+                real_time_delay = random.randrange(time_delay, max_time_delay)
+                get_client(False).expire(name, stale + real_time_delay + time_lock)
+                # create an asyncio task to run func
+                asyncio.create_task(asyncio.sleep(real_time_delay, func()))
+
+            return v
+
+        @functools.wraps(method)
+        async def async_wrapper(*args, **kwargs):
+            if kwargs.get("skip_cache"):
+                return await method(*args, **kwargs)
+
+            name = args[0] if args and not key else None
+
+            res = get_client(False).pipeline().ttl(name).get(name).execute()
+            v = pickle.loads(res[1]) if res[0] > 0 and res[1] else None
+            if res[0] <= 0 or res[0] < stale:
+
+                async def func():
+                    value = await method(*args, **kwargs)
+                    logging.debug("update cache: %s", name)
+                    get_client(False).pipeline().set(name, pickle.dumps(value)).expire(
+                        name, expire + stale
+                    ).execute()
+                    return value
+
+                # create new cache in blocking modal, if cache not exists.
+                if res[0] <= 0:
+                    return await func()
+
+                # create new cache in non blocking modal, and return stale data.
+                # set expire to get a "lock", and delay to run the task
+                real_time_delay = random.randrange(time_delay, max_time_delay)
+                get_client(False).expire(name, stale + real_time_delay + time_lock)
+                asyncio.create_task(asyncio.sleep(real_time_delay, func()))
+
+            return v
+
+        return async_wrapper if iscoroutinefunction(method) else wrapper
+
+    return decorate
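
server/utils/redis.py is re-emitted above with no functional change (line-ending normalization only); it defines the stalecache decorator, which stores pickled results in Redis and, once an entry is older than expire seconds, returns the stale value while attempting to schedule a refresh. What follows is a minimal usage sketch, not part of the patch: it assumes the decorated function takes the Redis key as its first positional argument (that is what name = args[0] in the wrapper implies), get_repo_info and the key format are hypothetical, and the import path assumes the server/ directory is on sys.path, as the module's own `from app import app` suggests.

# Hypothetical usage of the stalecache decorator from the module above.
from utils.redis import stalecache  # import path assumed; adjust to the actual package layout


@stalecache(expire=600, stale=3600)
def get_repo_info(cache_key, repo_id=None, **kwargs):
    # kwargs absorbs skip_cache, which the wrapper forwards to the wrapped function.
    # Imagine an expensive lookup here (GitHub API call, database query, ...).
    return {"repo_id": repo_id}


# First call: cache miss, the value is computed synchronously and stored under the key
# with TTL expire + stale; later calls are answered from Redis, and a stale entry is
# still returned while a refresh is scheduled. skip_cache=True bypasses the cache.
info = get_repo_info("repo:info:123", repo_id=123)
fresh = get_repo_info("repo:info:123", repo_id=123, skip_cache=True)
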