#!/usr/bin/env python3
# Analyzer that wraps https://github.com/bblfsh/sonar-checks
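#
# Implements the lookout Analyzer gRPC service: on each review event it
# fetches the changed files (UASTs only) from the lookout DataService, runs
# the sonar-checks for each file's language, and returns the findings as
# review comments.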

import logging
import os
import time

from lookout.sdk import pb
from lookout.sdk.service_data import DataStub
from lookout.sdk.grpc import to_grpc_address, create_channel, create_server, \
    LogUnaryServerInterceptor, LogStreamServerInterceptor, \
    LogUnaryClientInterceptor, LogStreamClientInterceptor

from bblfsh_sonar_checks import run_checks, list_checks
from bblfsh_sonar_checks.utils import list_langs

version = "alpha"
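
# Runtime configuration: read from SONARCHECK_* environment variables, with
# defaults suitable for local development.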
host_to_bind = os.getenv('SONARCHECK_HOST', "0.0.0.0")
port_to_listen = os.getenv('SONARCHECK_PORT', 9930)
data_srv_addr = to_grpc_address(
    os.getenv('SONARCHECK_DATA_SERVICE_URL', "ipv4://localhost:10301"))
log_level = os.getenv('SONARCHECK_LOG_LEVEL', "info").upper()

logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
logger.addHandler(handler)
logger.setLevel(log_level)
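
# Languages for which the bundled sonar-checks ship at least one check.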
langs = list_langs()
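

# Adapter that lets the lookout-sdk logging interceptors write through this
# module's logger.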
def log_fn(log_fields, msg):
    logger.debug("{msg} [{log_fields}]".format(
        msg=msg, log_fields=log_fields.fields))


class Analyzer(pb.AnalyzerServicer):
    def notify_review_event(self, request, context):
        logger.debug("got review request %s", request)
        comments = []

        # Client connection to the lookout DataService
        with create_channel(data_srv_addr, interceptors=[
                LogUnaryClientInterceptor(log_fn),
                LogStreamClientInterceptor(log_fn),
        ]) as channel:
            stub = DataStub(channel)
            changes = stub.get_changes(
                context,
                pb.ChangesRequest(
                    head=request.commit_revision.head,
                    base=request.commit_revision.base,
                    want_contents=False,
                    want_uast=True,
                    exclude_vendored=True,
                    include_languages=langs))
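
            # Each changed file is analyzed independently; deletions (no
            # "head" revision) are skipped.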
            for change in changes:
                if not change.HasField("head"):
                    continue

                logger.debug("analyzing '%s' in %s",
                             change.head.path, change.head.language)
                try:
                    check_results = run_checks(
                        list_checks(change.head.language.lower()),
                        change.head.language.lower(),
                        change.head.uast
                    )
                except Exception:
                    logger.exception(
                        "Error analyzing file '%s' in commit '%s'",
                        change.head.path, request.commit_revision.head.hash)
                    continue
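
                # One review comment per finding, prefixed with the check ID;
                # findings without position data fall back to line 0.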
                for check in check_results:
                    for res in check_results[check]:
                        comments.append(
                            pb.Comment(
                                file=change.head.path,
                                line=(res.get("pos", {}) or {}).get("line", 0),
                                text="{}: {}".format(check, res["msg"])))

        logger.info("%d comments produced", len(comments))
        return pb.EventResponse(analyzer_version=version, comments=comments)
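
    # Push events carry nothing for this analyzer to act on; just report the
    # analyzer version.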
    def notify_push_event(self, request, context):
        return pb.EventResponse(analyzer_version=version)


def serve():
    server = create_server(10, interceptors=[
        LogUnaryServerInterceptor(log_fn),
        LogStreamServerInterceptor(log_fn),
    ])
    pb.add_analyzer_to_server(Analyzer(), server)
    server.add_insecure_port("{}:{}".format(host_to_bind, port_to_listen))
    server.start()
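
    # start() does not block: the gRPC server runs on its worker threads, so
    # keep the main thread alive until interrupted.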
    one_day_sec = 60 * 60 * 24
    try:
        while True:
            time.sleep(one_day_sec)
    except KeyboardInterrupt:
        server.stop(0)
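

# Log a startup summary of the supported languages and checks.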
def print_check_stats():
    num_checks = 0
    all_checks = {}
    for lang in langs:
        checks = list_checks(lang)
        all_checks[lang] = checks
        num_checks += len(checks)

    logger.info("%d langs, %d checks supported", len(langs), num_checks)
    logger.debug("Langs: %s", langs)
    logger.debug("Checks: %s", all_checks)


def main():
    logger.info("starting gRPC Analyzer server at port %s", port_to_listen)
    print_check_stats()
    serve()


if __name__ == "__main__":
    main()
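
# To run locally (assumes a lookout DataService listening on the default
# ipv4://localhost:10301):
#   SONARCHECK_LOG_LEVEL=debug python3 sonarcheck_analyzer.py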