Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

更新TCAQL版本 #589

Merged
merged 2 commits into from
Aug 31, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 33 additions & 0 deletions client/tool/tca_ql_js.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright (c) 2021-2022 THL A29 Limited
#
# This source code file is made available under MIT License
# See LICENSE for details
# ==============================================================================


from util.logutil import LogPrinter
from tool.util.tca_ql import TcaQl


# Module-level logger shared by this tool wrapper.
logger = LogPrinter()


class TcaQlJs(TcaQl):
    """TCA QL tool wrapper that pins the analyzed language to JavaScript.

    All real work (database generation and rule analysis) is delegated to
    the shared :class:`TcaQl` base implementation with ``lang="js"``.
    """

    # NOTE: the redundant __init__ override that only forwarded to
    # super().__init__ was removed; the inherited constructor is identical.

    def compile(self, params):
        """Generate the data-flow database for JavaScript sources.

        :param params: task parameter object passed through to TcaQl.compile
        """
        super().compile(params, "js")

    def analyze(self, params):
        """Run TCA QL analysis on JavaScript sources.

        :param params: task parameter object passed through to TcaQl.analyze
        :return: list of issues produced by the base analyzer
        """
        return super().analyze(params, "js")

# Entry point attribute: the TCA client framework imports this module and
# looks up `tool` to obtain the tool class to instantiate.
tool = TcaQlJs

if __name__ == "__main__":
    # This module is not meant to be executed directly.
    pass
173 changes: 112 additions & 61 deletions client/tool/util/tca_ql.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@

import os
import shutil


import json

try:
Expand All @@ -28,15 +30,15 @@
from util.configlib import ConfigReader
from util.envset import EnvSet
from util.errcode import E_NODE_TASK_PARAM
from util.exceptions import TaskError
from util.exceptions import AnalyzeTaskError, TaskError
from util.pathfilter import FilterPathUtil
from util.pathlib import PathMgr
from task.codelintmodel import CodeLintModel
from task.scmmgr import SCMMgr
from util.subprocc import SubProcController
from util.zipmgr import Zip
from util.logutil import LogPrinter
from task.authcheck.check_license import __lu__
from task.codelintmodel import CodeLintModel
from task.basic.common import subprocc_log
from task.scmmgr import SCMMgr


logger = LogPrinter()
Expand All @@ -46,38 +48,50 @@
"php": [".php"],
"go": [".go"],
"cpp": [".cpp", ".c", ".C", ".cc", ".cxx", ".h", ".hxx", ".hpp"],
"js": [".js", ".jsx"],
}


class TcaQl(CodeLintModel):
""" """

def __init__(self, params):
CodeLintModel.__init__(self, params)

# 没有使用mysql数据库,采用sqlite所以需要文件服务器存储数据用于增量分析
# 如果使用mysql数据库可以考虑替换掉数据库地址参数代替下载上传数据文件
def __download_database(self, params, path):
"""下载之前存储的数据库,增量使用"""
base_dir = f"tcaql/repos/{params.repo_id}"
try:
file_name = os.path.join(base_dir, f"{path}.db")
db_path = os.path.join(params["work_dir"], "db", f"{path}.db")
file_name = os.path.join(base_dir, f"{path}.zip")
db_path = os.path.join(params["work_dir"], "db")
zip_file = os.path.join(db_path, f"{path}.zip")
logger.info(f"开始下载文件{file_name}")
file_server = RetryFileServer(retry_times=2).get_server()
if file_server.download_file(file_name, db_path):
if file_server.download_big_file(file_name, zip_file):
logger.info("下载成功")
Zip.decompress(zip_file, db_path)
return True
except:
logger.warning("下载失败")
except Exception as e:
logger.warning(f"下载失败 {e}")
return False

def __upload_database(self, params, path):
"""上传数据库"""
logger.info("准备上传到云端存储数据库")
base_dir = f"tcaql/repos/{params.repo_id}"
try:
logger.info("开始上传")
upload_file_name = os.path.join(base_dir, f"{path}.db")
db_dir = os.path.join(params["work_dir"], "db")
file_name = os.path.join(db_dir, f"{path}.db")
file_server = RetryFileServer(retry_times=2).get_server()
file_server.upload_file(file_name, upload_file_name)
logger.info("上传成功")
upload_file_name = os.path.join(base_dir, f"{path}.zip")
cache_dir = os.path.join(params["work_dir"], "db")
cache_zip = os.path.join(params["work_dir"], f"{path}.zip")
if os.path.exists(cache_zip):
os.remove(cache_zip)
if Zip().compress(cache_dir, cache_zip):
file_server = RetryFileServer(retry_times=2).get_server()
file_server.upload_file(cache_zip, upload_file_name)
logger.info("上传成功")
except:
logger.warning("上传失败")
return False
Expand Down Expand Up @@ -115,7 +129,11 @@ def __generate_config_file(self, rule_list, work_dir, source_dir, toscans):
return setting_file

def compile(self, params, lang):
"""
编译函数,指代码生成数据流
"""
source_dir = params.source_dir
relpos = len(source_dir) + 1
logger.info("开始编译项目 %s" % source_dir)
work_dir = params.work_dir
db_dir = os.path.join(work_dir, "db")
Expand All @@ -127,80 +145,105 @@ def compile(self, params, lang):
db_name = f"{repo_id}_{scm_revision}_{lang}"
db_path = os.path.join(db_dir, db_name + ".db")
inc = params["incr_scan"]
want_suffix = lang_map[lang]
logger.info("是否为增量编译: %s" % inc)
file_list = os.path.join(work_dir, "filelist.txt")
if not os.path.exists(db_dir):
if not os.path.exists(db_dir):
os.makedirs(db_dir)
if self.__download_database(params, db_name):
if os.path.exists(db_path):
return
if inc:
last_scm_revision = params["scm_last_revision"]
last_db_name = f"{repo_id}_{last_scm_revision}_{lang}"
last_db_path = os.path.join(db_dir, last_db_name + ".db")
logger.info(f"下载上个成功分析版本数据库{last_db_name}")
if not self.__download_database(params, last_db_name):
logger.info("下载数据库失败将重新生成")
logger.info("下载全量数据库失败将重新生成,本次分析将只分析增量部分")
else:
shutil.copyfile(last_db_path, db_path)
want_suffix = lang_map[lang]
diffs = SCMMgr(params).get_scm_diff()
toscans = [diff.path.replace(os.sep, "/") for diff in diffs if diff.path.endswith(tuple(want_suffix))]
inc_build_cmd = [
# "./Zeus",
"inc_compile",
"-l",
lang,
"-p",
db_name,
"-db",
db_dir,
"-s",
source_dir,
"-f",
]
logger.info(inc_build_cmd)
pre_cmd_len = len(inc_build_cmd)
CMD_ARG_MAX = PathMgr().get_cmd_arg_max()
LogPrinter.info("命令行长度限制:%d" % CMD_ARG_MAX)
cmd_args_list = PathMgr().get_cmd_args_list(inc_build_cmd, toscans, CMD_ARG_MAX)
for cmd in cmd_args_list:
tmp_cmd = inc_build_cmd + [",".join(cmd[pre_cmd_len:])]
tmp_cmd = self.get_cmd(tool_path, tmp_cmd)
sp = SubProcController(
command=tmp_cmd,
cwd=ZEUS_HOME,
stdout_line_callback=subprocc_log,
stderr_line_callback=subprocc_log,
)
sp.wait()
logger.info(sp.returncode)
self.__upload_database(params, db_name)
return
diffs = SCMMgr(params).get_scm_diff()
# 增量需要所有增量文件都重新生成,故这里不能过滤文件
toscans = [diff.path.replace(os.sep, "/") for diff in diffs if diff.path.endswith(tuple(want_suffix))]
toscans = FilterPathUtil(params).get_include_files(toscans, relpos)
if not toscans:
return []
with open(file_list, "w") as wf:
for toscan in toscans:
wf.writelines(toscan)
wf.write("\n")
inc_build_cmd = [
"./Zeus",
"inc_compile",
"-l",
lang,
"-p",
db_name,
"-db",
db_dir,
"-s",
source_dir,
# "-d",
"-f",
file_list,
]
logger.info(inc_build_cmd)
inc_build_cmd = self.get_cmd(tool_path, inc_build_cmd)
# cmd_args += toscans
sp = SubProcController(
command=inc_build_cmd,
cwd=ZEUS_HOME,
stdout_line_callback=self.subprocc_log,
stderr_line_callback=self.subprocc_log,
)
sp.wait()
logger.info(sp.returncode)
self.__upload_database(params, db_name)
return
# 全量编译命令
toscans = [path.replace(os.sep, "/") for path in PathMgr().get_dir_files(source_dir, tuple(want_suffix))]
toscans = FilterPathUtil(params).get_include_files(toscans, relpos)
if not toscans:
return []
with open(file_list, "w") as wf:
for toscan in toscans:
wf.writelines(toscan)
wf.write("\n")
full_build_cmd = [
# "./Zeus",
"compile",
"-p",
db_name,
"-db",
"-cc",
db_dir,
"-l",
lang,
"-s",
source_dir,
"-f",
file_list
# "-d", # 调试使用
]
logger.info(full_build_cmd)
full_build_cmd = self.get_cmd(tool_path, full_build_cmd)
logger.info(" ".join(full_build_cmd))
sp = SubProcController(
command=full_build_cmd,
cwd=ZEUS_HOME,
stdout_line_callback=subprocc_log,
stderr_line_callback=subprocc_log,
stdout_line_callback=self.subprocc_log,
stderr_line_callback=self.subprocc_log,
)
sp.wait()
logger.info(sp.returncode)
self.__upload_database(params, db_name)
return

def analyze(self, params, lang):
"""
tca ql 工具分析函数
:param params: 执行需要的参数
:return :
"""
source_dir = params.source_dir
relpos = len(source_dir) + 1
work_dir = params.work_dir
Expand All @@ -217,7 +260,8 @@ def analyze(self, params, lang):
os.makedirs(db_dir)
logger.info(f"本地未找到数据库文件{db_path},从文件服务器下载")
if not self.__download_database(params, db_name):
raise TaskError(E_NODE_TASK_PARAM, "数据库读取失败可能为选错了语言")
logger.info("本地没有找到数据库,缓存数据库下载失败,可能分析文件为空")
return []
rules = params["rule_list"]
inc = params["incr_scan"]
want_suffix = lang_map[lang]
Expand Down Expand Up @@ -248,24 +292,27 @@ def analyze(self, params, lang):
"analyze",
"-l",
lang,
"-cc",
db_dir,
"-db",
db_path,
db_name,
"-o",
output_json,
"-c",
setting_file,
# "-d",
]
logger.info(analyze_cmd)
task_dir = os.path.dirname(os.getcwd())
request_file = os.path.abspath(os.path.join(task_dir, "task_request.json"))
os.environ["TASK_REQUEST"] = request_file
issues = []
analyze_cmd = self.get_cmd(tool_path, analyze_cmd)
logger.info(" ".join(analyze_cmd))
sp = SubProcController(
command=analyze_cmd,
cwd=HADES_HOME,
stdout_line_callback=subprocc_log,
stderr_line_callback=subprocc_log,
stdout_line_callback=self.subprocc_log,
stderr_line_callback=self.subprocc_log,
env=EnvSet().get_origin_env(),
)
sp.wait()
Expand All @@ -275,10 +322,14 @@ def analyze(self, params, lang):
issues.extend(result)
else:
logger.warning("未生成结果文件")
if os.path.exists(db_path):
self.__upload_database(params, db_name)
# if os.path.exists(db_path):
# self.__upload_database(params, db_name)
return issues

def subprocc_log(self, line):
    """Forward one line of subprocess output to the module logger."""
    logger.info(line)

def get_cmd(self, tool_path, options):
    """Build the licensed command line for the tool binary.

    :param tool_path: path to the tool executable
    :param options: list of command-line arguments
    :return: full command list formatted by the license helper __lu__
    """
    return __lu__().format_cmd(tool_path, options)

Expand Down
Loading