支持分页和多分支ID获取,解决token更新问题

This commit is contained in:
淮新 2024-05-08 16:02:47 +08:00
parent b8091f0f05
commit b3f634b1f9
5 changed files with 79 additions and 41 deletions

13
API.md
View File

@ -188,14 +188,17 @@ SYNC_DIR = os.getenv("SYNC_DIR", "/tmp/sync_dir/")
注: 仓库由内到外同步时,分支输入内部仓库分支名;仓库由外到内同步时,分支输入外部仓库分支名;
## 日志信息获取
允许用户通过此接口获取仓库/分支的同步日志。
允许用户通过此接口分页、使用多个分支ID获取仓库/分支的同步日志。
- **URL**`/cerobot/sync/repo/{repo_name}/logs`
- **Method**`GET`
### 请求参数body
| 参数 | 类型 | 示例输入 | 是否必须 | 说明 |
|-----------|--------| --- |------|------|
| repo_name | string | | yes | 仓库名称 |
| branch_id | int | | no | 分支id |
| 参数 | 类型 | 示例输入 | 是否必须 | 说明 |
|--------------|--------|---------|------| --- |
| repo_name | string | | yes | 仓库名称 |
| branch_id | string | 1,2,3 | no | 分支id |
| page_num | int | 默认1 | no | 页数 |
| page_size | int | 默认10 | no | 条数 |
| create_sort | bool | 默认False | no | 创建时间排序, 默认倒序 |
注: 获取仓库粒度的同步日志时无需输入分支id

View File

@ -16,7 +16,7 @@ from src.router import SYNC_CONFIG as router
from src.do.sync_config import SyncDirect
from src.dto.sync_config import SyncRepoDTO, SyncBranchDTO, LogDTO, ModifyRepoDTO
from src.service.sync_config import SyncService, LogService
from src.service.cronjob import sync_repo_task, sync_branch_task
from src.service.cronjob import sync_repo_task, sync_branch_task, modify_repos
from src.base.status_code import Status, SYNCResponse, SYNCException
from src.service.cronjob import GITMSGException
@ -219,8 +219,15 @@ class SyncDirection(Controller):
repo_name: str = Path(..., description="仓库名称"),
dto: ModifyRepoDTO = Body(..., description="更新仓库地址信息")
):
api_log(LogType.INFO, f"用户 {user} 使用 PUT 方法访问接口 {request.url.path} ", user)
api_log(LogType.INFO, f"用户 {user} 使用 PUT 方法访问接口 {request.url.path} 更新仓库信息", user)
data = await self.service.update_repo_addr(repo_name=repo_name, dto=dto)
try:
await modify_repos(repo_name, user)
except GITMSGException as GITError:
return SYNCResponse(
code_status=GITError.status,
msg=GITError.msg
)
return SYNCResponse(
code_status=data.code_status,
msg=data.status_msg
@ -257,12 +264,13 @@ class SyncDirection(Controller):
async def get_logs(
self, request: Request, user: str = Depends(user),
repo_name: str = Path(..., description="仓库名称"),
branch_id: int = Query(None, description="分支id仓库粒度无需输入"),
branch_id: str = Query(None, description="分支id仓库粒度无需输入"),
page_num: int = Query(1, description="页数"), page_size: int = Query(10, description="条数"),
create_sort: bool = Query(False, description="创建时间排序, 默认倒序")
):
api_log(LogType.INFO, f"用户 {user} 使用 GET 方法访问接口 {request.url.path} ", user)
data = await self.log_service.get_logs(repo_name=repo_name, branch_id=branch_id,
branch_id_list = branch_id.split(',')
data = await self.log_service.get_logs(repo_name=repo_name, branch_id_list=branch_id_list,
page_num=page_num, page_size=page_size, create_sort=create_sort)
if not data:
return SYNCResponse(

View File

@ -1,4 +1,4 @@
from sqlalchemy import select, update, func
from sqlalchemy import select, update, func, and_, or_
from sqlalchemy.exc import NoResultFound
from src.do.sync_config import SyncBranchMapping, SyncRepoMapping, LogDO
from .mysql_ao import MysqlAO
@ -283,13 +283,17 @@ class LogDAO(BaseDAO, metaclass=Singleton):
await session.execute(stmt)
await session.commit()
async def get_log(self, repo_name: str, branch_id: int, page_number: int, page_size: int, create_sort: bool) -> List[LogDTO]:
async def get_log(self, repo_name: str, branch_id_list: List[str], page_number: int, page_size: int, create_sort: bool) -> List[LogDTO]:
async with self._async_session() as session:
async with session.begin():
stmt = select(LogDO).where(LogDO.repo_name == repo_name, LogDO.branch_id == branch_id)
branch_id_list = [int(branch_id) for branch_id in branch_id_list]
query = select(LogDO).where(LogDO.repo_name == repo_name, LogDO.branch_id.in_(branch_id_list))
# stmt = select(LogDO).where(LogDO.repo_name == repo_name, LogDO.branch_id == branch_id)
# stmt = stmt.order_by(create_order).offset((page_number - 1) * page_size).limit(page_size)
create_order = LogDO.created_at if create_sort else LogDO.created_at.desc()
stmt = stmt.order_by(create_order).offset((page_number - 1) * page_size).limit(page_size)
do_list: List[LogDO] = (await session.execute(stmt)).scalars().all()
query = query.order_by(create_order)
query = query.offset((page_number - 1) * page_size).limit(page_size)
do_list: List[LogDO] = (await session.execute(query)).scalars().all()
datas = []
for do in do_list:
data = LogDTO(

View File

@ -9,7 +9,7 @@ from src.base.config import SYNC_DIR
from src.dao.sync_config import SyncRepoDAO, SyncBranchDAO
from src.do.sync_config import SyncDirect, SyncType
from src.dto.sync_config import SyncBranchDTO
from src.utils.sync_log import sync_log, LogType, log_path
from src.utils.sync_log import sync_log, LogType, log_path, api_log
from src.service.sync_config import LogService
sync_repo_dao = SyncRepoDAO()
@ -46,8 +46,8 @@ def shell(cmd, dire: str, log_name: str, user: str):
log = f'Execute cmd: ' + cmd
if 'git clone' in log:
sync_log(LogType.INFO, 'Execute cmd: git clone', log_name, user)
elif 'git remote add' in log:
sync_log(LogType.INFO, 'Execute cmd: git remote add', log_name, user)
elif 'git remote' in log:
sync_log(LogType.INFO, '添加/更新仓库信息', log_name, user)
elif 'git ls-remote' in log:
sync_log(LogType.INFO, '获取仓库分支信息', log_name, user)
else:
@ -84,34 +84,44 @@ def inter_to_outer(repo, branch, log_name: str, user: str):
repo_dir = os.path.join(SYNC_DIR, repo.repo_name)
inter_name = branch.internal_branch_name
outer_name = branch.external_branch_name
# 从internal仓库的指定分支inter_name中获取代码更新远程分支的信息到本地仓库
shell(f"git fetch internal {inter_name}", repo_dir, log_name, user)
# 切换到inter_name分支并将internal仓库的分支强制 checkout 到当前分支。
shell(f"git checkout -B {inter_name} internal/{inter_name}", repo_dir, log_name, user)
# 将本地仓库的inter_name分支推送到external仓库的outer_name分支上。
shell(f"git push external {inter_name}:{outer_name}", repo_dir, log_name, user)
# commit id
result = shell(f"git log HEAD~1..HEAD --oneline", repo_dir, log_name, user)
commit_id = result.stdout.split(" ")[0]
sync_log(LogType.INFO, f'[COMMIT ID: {commit_id}]', log_name, user)
return commit_id
try:
# 从internal仓库的指定分支inter_name中获取代码更新远程分支的信息到本地仓库
shell(f"git fetch internal {inter_name}", repo_dir, log_name, user)
# 切换到inter_name分支并将internal仓库的分支强制 checkout 到当前分支。
shell(f"git checkout -B {inter_name} internal/{inter_name}", repo_dir, log_name, user)
# 将本地仓库的inter_name分支推送到external仓库的outer_name分支上。
shell(f"git push external {inter_name}:{outer_name}", repo_dir, log_name, user)
# commit id
# result = shell(f"git log HEAD~1..HEAD --oneline", repo_dir, log_name, user)
# commit_id = result.stdout.split(" ")[0]
result = shell(f'git log -1 --format="%H"', repo_dir, log_name, user)
commit_id = result.stdout[0:7]
sync_log(LogType.INFO, f'[COMMIT ID: {commit_id}]', log_name, user)
return commit_id
except Exception as e:
raise
def outer_to_inter(repo, branch, log_name: str, user: str):
repo_dir = os.path.join(SYNC_DIR, repo.repo_name)
inter_name = branch.internal_branch_name
outer_name = branch.external_branch_name
# 从external仓库的指定分支outer_name中获取代码更新远程分支的信息到本地仓库
shell(f"git fetch external {outer_name}", repo_dir, log_name, user)
# 切换到本地仓库的outer_name分支并将origin仓库的outer_name分支强制 checkout 到当前分支。
shell(f"git checkout -B {outer_name} external/{outer_name}", repo_dir, log_name, user)
# 将本地仓库的outer_name分支推送到internal仓库的inter_name分支上。
shell(f"git push internal {outer_name}:{inter_name}", repo_dir, log_name, user)
# commit id
result = shell(f"git log HEAD~1..HEAD --oneline", repo_dir, log_name, user)
commit_id = result.stdout.split(" ")[0]
sync_log(LogType.INFO, f'[COMMIT ID: {commit_id}]', log_name, user)
return commit_id
try:
# 从external仓库的指定分支outer_name中获取代码更新远程分支的信息到本地仓库
shell(f"git fetch external {outer_name}", repo_dir, log_name, user)
# 切换到本地仓库的outer_name分支并将origin仓库的outer_name分支强制 checkout 到当前分支。
shell(f"git checkout -B {outer_name} external/{outer_name}", repo_dir, log_name, user)
# 将本地仓库的outer_name分支推送到internal仓库的inter_name分支上。
shell(f"git push internal {outer_name}:{inter_name}", repo_dir, log_name, user)
# commit id
# result = shell(f"git log HEAD~1..HEAD --oneline", repo_dir, log_name, user)
# commit_id = result.stdout.split(" ")[0]
result = shell(f'git log -1 --format=%h', repo_dir, log_name, user)
commit_id = result.stdout[0:7]
sync_log(LogType.INFO, f'[COMMIT ID: {commit_id}]', log_name, user)
return commit_id
except Exception as e:
raise
async def sync_repo_task(repo, user):
@ -176,3 +186,16 @@ async def sync_branch_task(repo, branches, direct, user):
await log_service.insert_branch_log(repo.repo_name, direct, branch.id, commit_id)
os.remove(os.path.join(log_path, log_name))
async def modify_repos(repo_name, user: str):
    """Refresh the git remote URLs of a locally cloned sync repo.

    Intended to run after a repo's access tokens are updated: it re-points the
    ``internal`` and ``external`` git remotes at token-embedded repository
    addresses so later fetch/push operations authenticate with the new tokens.

    Args:
        repo_name: Name of the repository record to look up and update.
        user: Acting user name, forwarded to shell-command logging.
    """
    repo = await sync_repo_dao.get(repo_name=repo_name)
    # Idiomatic (and race-free) replacement for the original
    # `not os.path.exists(SYNC_DIR) and os.makedirs(SYNC_DIR)` side-effect hack.
    os.makedirs(SYNC_DIR, exist_ok=True)
    repo_dir = os.path.join(SYNC_DIR, repo.repo_name)
    log_name = f'update_{repo.repo_name}.log'
    # No local clone means there is nothing to rewrite; remotes are silently
    # left untouched. NOTE(review): presumably a later sync task clones with
    # the fresh tokens in that case — confirm against the sync task flow.
    if os.path.exists(repo_dir):
        inter_repo_addr = get_repo_address_with_token(repo.internal_repo_address, repo.inter_token)
        exter_repo_addr = get_repo_address_with_token(repo.external_repo_address, repo.exter_token)
        # Rewrite the internal remote URL.
        shell(f'git remote set-url internal {inter_repo_addr}', repo_dir, log_name, user)
        # Rewrite the external remote URL.
        shell(f'git remote set-url external {exter_repo_addr}', repo_dir, log_name, user)

View File

@ -211,7 +211,7 @@ class LogService(Service):
# else:
# await self.sync_log_dao.update_branch_log(repo_name, direct, branch_id, commit_id, log_content)
async def get_logs(self, repo_name: str, branch_id: int, page_num: int, page_size: int, create_sort: bool) -> Optional[List[LogDTO]]:
logs = await self.sync_log_dao.get_log(repo_name=repo_name, branch_id=branch_id,
async def get_logs(self, repo_name: str, branch_id_list: List[str], page_num: int, page_size: int, create_sort: bool) -> Optional[List[LogDTO]]:
logs = await self.sync_log_dao.get_log(repo_name=repo_name, branch_id_list=branch_id_list,
page_number=page_num, page_size=page_size, create_sort=create_sort)
return logs