Skip to content

Commit

Permalink
reformat
Browse files Browse the repository at this point in the history
  • Loading branch information
freeziyou committed Jan 30, 2024
1 parent c04fd9d commit 81b11eb
Show file tree
Hide file tree
Showing 3 changed files with 168 additions and 168 deletions.
114 changes: 57 additions & 57 deletions .github/workflows/docker-image.yml
Original file line number Diff line number Diff line change
@@ -1,57 +1,57 @@
name: Docker Image CI

on:
  push:
    branches: ["main"]
    # NOTE(review): '**' already matches every file, so the extra
    # 'website/**' entry in the original was redundant and no real path
    # filtering happens — routing is done by the job-level `if` conditions.
    paths:
      - '**'
  workflow_dispatch:

jobs:
  # Builds the main application image unless the push only touched the website.
  build-main-app:
    runs-on: ubuntu-latest
    # NOTE(review): head_commit.modified only lists files of the *last* commit
    # in the push, not every pushed commit — confirm this routing is intended.
    if: ${{ github.event_name == 'workflow_dispatch' || (github.event_name == 'push' && !contains(github.event.head_commit.modified, 'website')) }}
    steps:
      - name: Check out the repo
        # v2 ran on the removed Node 12 runtime; v4 is the supported release.
        uses: actions/checkout@v4
      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_ACCOUNT }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
      - name: Build and push main app Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: deploy/Dockerfile
          push: true
          tags: connectai/gitmaya:latest

  # Builds the website proxy images when the website changed (or on dispatch).
  build-website:
    runs-on: ubuntu-latest
    if: ${{ github.event_name == 'workflow_dispatch' || contains(github.event.head_commit.modified, 'website') }}
    steps:
      - name: Check out the repo
        uses: actions/checkout@v4
        with:
          # The website lives in a git submodule.
          submodules: 'true'
      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_ACCOUNT }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
      - name: Build and push website Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: deploy/Dockerfile.proxy
          push: true
          tags: connectai/gitmaya-proxy:latest
      - name: Build and push website Docker image-SaaS
        uses: docker/build-push-action@v5
        with:
          context: .
          file: deploy/Dockerfile.proxy.saas
          push: true
          tags: connectai/gitmaya-proxy:saas
name: Docker Image CI

on:
  push:
    branches: ["main"]
    # NOTE(review): '**' already matches every file, so the extra
    # 'website/**' entry in the original was redundant and no real path
    # filtering happens — routing is done by the job-level `if` conditions.
    paths:
      - '**'
  workflow_dispatch:

jobs:
  # Builds the main application image unless the push only touched the website.
  build-main-app:
    runs-on: ubuntu-latest
    # NOTE(review): head_commit.modified only lists files of the *last* commit
    # in the push, not every pushed commit — confirm this routing is intended.
    if: ${{ github.event_name == 'workflow_dispatch' || (github.event_name == 'push' && !contains(github.event.head_commit.modified, 'website')) }}
    steps:
      - name: Check out the repo
        # v2 ran on the removed Node 12 runtime; v4 is the supported release.
        uses: actions/checkout@v4
      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_ACCOUNT }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
      - name: Build and push main app Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: deploy/Dockerfile
          push: true
          tags: connectai/gitmaya:latest

  # Builds the website proxy images when the website changed (or on dispatch).
  build-website:
    runs-on: ubuntu-latest
    if: ${{ github.event_name == 'workflow_dispatch' || contains(github.event.head_commit.modified, 'website') }}
    steps:
      - name: Check out the repo
        uses: actions/checkout@v4
        with:
          # The website lives in a git submodule.
          submodules: 'true'
      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_HUB_ACCOUNT }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
      - name: Build and push website Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: deploy/Dockerfile.proxy
          push: true
          tags: connectai/gitmaya-proxy:latest
      - name: Build and push website Docker image-SaaS
        uses: docker/build-push-action@v5
        with:
          context: .
          file: deploy/Dockerfile.proxy.saas
          push: true
          tags: connectai/gitmaya-proxy:saas
2 changes: 1 addition & 1 deletion deploy/Dockerfile.proxy.saas
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ RUN corepack enable
COPY ./website /app
WORKDIR /app

RUN pnpm install --frozen-lockfile
RUN pnpm install --frozen-lockfile
RUN pnpm build:saas

FROM jwilder/nginx-proxy:alpine
Expand Down
220 changes: 110 additions & 110 deletions server/utils/redis.py
Original file line number Diff line number Diff line change
@@ -1,110 +1,110 @@
import asyncio
import functools
import logging
import pickle
import random
from inspect import iscoroutinefunction

import redis
from app import app

# Default Redis URL (docker-compose style host "redis", db 0); an app config
# entry set before this module is imported takes precedence.
app.config.setdefault("REDIS_URL", "redis://redis:6379/0")

# Shared string-oriented client (decode_responses=True). The pickle-based
# cache below uses get_client(False) instead to get raw bytes back.
client = redis.from_url(app.config["REDIS_URL"], decode_responses=True)


class RedisStorage(object):
    """Thin key/value facade over the module-level Redis ``client``.

    Any truthy keyword arguments passed to the constructor are stored
    immediately; falsy values are silently skipped.
    """

    def __init__(self, **kwargs):
        # Persist each truthy keyword argument as an initial entry.
        for field, initial in kwargs.items():
            if initial:
                self.set(field, initial)

    def get(self, name):
        """Return the value stored under ``name``."""
        return client.get(name)

    def set(self, name, value):
        """Store ``value`` under ``name``."""
        client.set(name, value)


def get_client(decode_responses=False):
    """Create a fresh Redis client from the app's configured REDIS_URL.

    The default ``decode_responses=False`` returns raw bytes, which the
    pickle-based cache in this module requires.
    """
    url = app.config["REDIS_URL"]
    return redis.from_url(url, decode_responses=decode_responses)


def gen_prefix(obj, method):
    """Build a dotted cache-key prefix: ``<module>.<ClassName>.<method_name>``."""
    parts = (obj.__module__, obj.__class__.__name__, method.__name__)
    return ".".join(parts)


def stalecache(
    key=None,
    expire=600,
    stale=3600,
    time_lock=1,
    time_delay=1,
    max_time_delay=10,
):
    """Stale-while-revalidate caching decorator backed by Redis.

    Results are pickled into Redis under a name taken from the call's first
    positional argument and kept for ``expire + stale`` seconds.

    - Missing key (TTL <= 0): the wrapped function runs synchronously and its
      result is cached before being returned (blocking refresh).
    - Stale key (TTL < stale): the cached value is returned immediately and a
      background refresh is scheduled after a random delay in
      [``time_delay``, ``max_time_delay``); the key's TTL is extended by the
      delay plus ``time_lock`` so concurrent callers do not also refresh.

    Works for both sync and async functions. Pass ``skip_cache=True`` to
    bypass the cache entirely.

    NOTE(review): ``key`` is accepted but never used to derive the cache
    name; when it is truthy, ``name`` becomes None — confirm intent.
    """

    def decorate(method):
        @functools.wraps(method)
        def wrapper(*args, **kwargs):
            if kwargs.get("skip_cache"):
                return method(*args, **kwargs)
            name = args[0] if args and not key else None

            # res[0] = TTL in seconds (-2 missing, -1 no expiry); res[1] = raw bytes.
            res = get_client(False).pipeline().ttl(name).get(name).execute()
            v = pickle.loads(res[1]) if res[0] > 0 and res[1] else None
            if res[0] <= 0 or res[0] < stale:

                def func():
                    value = method(*args, **kwargs)
                    logging.debug("update cache: %s", name)
                    get_client(False).pipeline().set(name, pickle.dumps(value)).expire(
                        name, expire + stale
                    ).execute()
                    return value

                # Create new cache in blocking mode if it does not exist yet.
                if res[0] <= 0:
                    return func()

                # Stale: extend the TTL as a soft lock, refresh in the
                # background after a randomized delay, return stale data now.
                real_time_delay = random.randrange(time_delay, max_time_delay)
                get_client(False).expire(name, stale + real_time_delay + time_lock)
                # BUG FIX: the original used
                #   asyncio.create_task(asyncio.sleep(real_time_delay, func()))
                # which (a) called func() eagerly, defeating the delay, and
                # (b) raised RuntimeError when no event loop was running.
                # A daemon timer thread performs the delayed refresh instead.
                import threading

                refresh = threading.Timer(real_time_delay, func)
                refresh.daemon = True
                refresh.start()

            return v

        @functools.wraps(method)
        async def async_wrapper(*args, **kwargs):
            if kwargs.get("skip_cache"):
                return await method(*args, **kwargs)

            name = args[0] if args and not key else None

            # res[0] = TTL in seconds (-2 missing, -1 no expiry); res[1] = raw bytes.
            res = get_client(False).pipeline().ttl(name).get(name).execute()
            v = pickle.loads(res[1]) if res[0] > 0 and res[1] else None
            if res[0] <= 0 or res[0] < stale:

                async def func():
                    value = await method(*args, **kwargs)
                    logging.debug("update cache: %s", name)
                    get_client(False).pipeline().set(name, pickle.dumps(value)).expire(
                        name, expire + stale
                    ).execute()
                    return value

                # Create new cache in blocking mode if it does not exist yet.
                if res[0] <= 0:
                    return await func()

                # Stale: extend the TTL as a soft lock, refresh in the
                # background after a randomized delay, return stale data now.
                real_time_delay = random.randrange(time_delay, max_time_delay)
                get_client(False).expire(name, stale + real_time_delay + time_lock)

                # BUG FIX: the original passed func()'s coroutine as the
                # *result* argument of asyncio.sleep(), so the refresh never
                # ran ("coroutine was never awaited"). Await it after the
                # delay inside a real task instead.
                async def delayed_refresh():
                    await asyncio.sleep(real_time_delay)
                    await func()

                asyncio.create_task(delayed_refresh())

            return v

        return async_wrapper if iscoroutinefunction(method) else wrapper

    return decorate
import asyncio
import functools
import logging
import pickle
import random
from inspect import iscoroutinefunction

import redis
from app import app

# Default Redis URL (docker-compose style host "redis", db 0); an app config
# entry set before this module is imported takes precedence.
app.config.setdefault("REDIS_URL", "redis://redis:6379/0")

# Shared string-oriented client (decode_responses=True). The pickle-based
# cache below uses get_client(False) instead to get raw bytes back.
client = redis.from_url(app.config["REDIS_URL"], decode_responses=True)


class RedisStorage(object):
    """Thin key/value facade over the module-level Redis ``client``.

    Any truthy keyword arguments passed to the constructor are stored
    immediately; falsy values are silently skipped.
    """

    def __init__(self, **kwargs):
        # Persist each truthy keyword argument as an initial entry.
        for field, initial in kwargs.items():
            if initial:
                self.set(field, initial)

    def get(self, name):
        """Return the value stored under ``name``."""
        return client.get(name)

    def set(self, name, value):
        """Store ``value`` under ``name``."""
        client.set(name, value)


def get_client(decode_responses=False):
    """Create a fresh Redis client from the app's configured REDIS_URL.

    The default ``decode_responses=False`` returns raw bytes, which the
    pickle-based cache in this module requires.
    """
    url = app.config["REDIS_URL"]
    return redis.from_url(url, decode_responses=decode_responses)


def gen_prefix(obj, method):
    """Build a dotted cache-key prefix: ``<module>.<ClassName>.<method_name>``."""
    parts = (obj.__module__, obj.__class__.__name__, method.__name__)
    return ".".join(parts)


def stalecache(
    key=None,
    expire=600,
    stale=3600,
    time_lock=1,
    time_delay=1,
    max_time_delay=10,
):
    """Stale-while-revalidate caching decorator backed by Redis.

    Results are pickled into Redis under a name taken from the call's first
    positional argument and kept for ``expire + stale`` seconds.

    - Missing key (TTL <= 0): the wrapped function runs synchronously and its
      result is cached before being returned (blocking refresh).
    - Stale key (TTL < stale): the cached value is returned immediately and a
      background refresh is scheduled after a random delay in
      [``time_delay``, ``max_time_delay``); the key's TTL is extended by the
      delay plus ``time_lock`` so concurrent callers do not also refresh.

    Works for both sync and async functions. Pass ``skip_cache=True`` to
    bypass the cache entirely.

    NOTE(review): ``key`` is accepted but never used to derive the cache
    name; when it is truthy, ``name`` becomes None — confirm intent.
    """

    def decorate(method):
        @functools.wraps(method)
        def wrapper(*args, **kwargs):
            if kwargs.get("skip_cache"):
                return method(*args, **kwargs)
            name = args[0] if args and not key else None

            # res[0] = TTL in seconds (-2 missing, -1 no expiry); res[1] = raw bytes.
            res = get_client(False).pipeline().ttl(name).get(name).execute()
            v = pickle.loads(res[1]) if res[0] > 0 and res[1] else None
            if res[0] <= 0 or res[0] < stale:

                def func():
                    value = method(*args, **kwargs)
                    logging.debug("update cache: %s", name)
                    get_client(False).pipeline().set(name, pickle.dumps(value)).expire(
                        name, expire + stale
                    ).execute()
                    return value

                # Create new cache in blocking mode if it does not exist yet.
                if res[0] <= 0:
                    return func()

                # Stale: extend the TTL as a soft lock, refresh in the
                # background after a randomized delay, return stale data now.
                real_time_delay = random.randrange(time_delay, max_time_delay)
                get_client(False).expire(name, stale + real_time_delay + time_lock)
                # BUG FIX: the original used
                #   asyncio.create_task(asyncio.sleep(real_time_delay, func()))
                # which (a) called func() eagerly, defeating the delay, and
                # (b) raised RuntimeError when no event loop was running.
                # A daemon timer thread performs the delayed refresh instead.
                import threading

                refresh = threading.Timer(real_time_delay, func)
                refresh.daemon = True
                refresh.start()

            return v

        @functools.wraps(method)
        async def async_wrapper(*args, **kwargs):
            if kwargs.get("skip_cache"):
                return await method(*args, **kwargs)

            name = args[0] if args and not key else None

            # res[0] = TTL in seconds (-2 missing, -1 no expiry); res[1] = raw bytes.
            res = get_client(False).pipeline().ttl(name).get(name).execute()
            v = pickle.loads(res[1]) if res[0] > 0 and res[1] else None
            if res[0] <= 0 or res[0] < stale:

                async def func():
                    value = await method(*args, **kwargs)
                    logging.debug("update cache: %s", name)
                    get_client(False).pipeline().set(name, pickle.dumps(value)).expire(
                        name, expire + stale
                    ).execute()
                    return value

                # Create new cache in blocking mode if it does not exist yet.
                if res[0] <= 0:
                    return await func()

                # Stale: extend the TTL as a soft lock, refresh in the
                # background after a randomized delay, return stale data now.
                real_time_delay = random.randrange(time_delay, max_time_delay)
                get_client(False).expire(name, stale + real_time_delay + time_lock)

                # BUG FIX: the original passed func()'s coroutine as the
                # *result* argument of asyncio.sleep(), so the refresh never
                # ran ("coroutine was never awaited"). Await it after the
                # delay inside a real task instead.
                async def delayed_refresh():
                    await asyncio.sleep(real_time_delay)
                    await func()

                asyncio.create_task(delayed_refresh())

            return v

        return async_wrapper if iscoroutinefunction(method) else wrapper

    return decorate

0 comments on commit 81b11eb

Please sign in to comment.