These two interceptors serve as the default rpc client-side interceptor and the default rpc server-side interceptor, respectively:
DurationInterceptor:
// zrpc/internal/clientinterceptors/durationinterceptor.go
// DurationInterceptor is an interceptor that logs the processing time.
func DurationInterceptor(ctx context.Context, method string, req, reply interface{},
    cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
    serverName := path.Join(cc.Target(), method)
    start := timex.Now()
    err := invoker(ctx, method, req, reply, cc, opts...)
    if err != nil {
        logx.WithContext(ctx).WithDuration(timex.Since(start)).Infof("fail - %s - %v - %s",
            serverName, req, err.Error())
    } else {
        elapsed := timex.Since(start)
        if elapsed > slowThreshold {
            logx.WithContext(ctx).WithDuration(elapsed).Slowf("[RPC] ok - slowcall - %s - %v - %v",
                serverName, req, reply)
        }
    }

    return err
}
UnaryStatInterceptor:
// zrpc/internal/serverinterceptors/statinterceptor.go
// UnaryStatInterceptor returns a func that uses given metrics to report stats.
func UnaryStatInterceptor(metrics *stat.Metrics) grpc.UnaryServerInterceptor {
    return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo,
        handler grpc.UnaryHandler) (resp interface{}, err error) {
        defer handleCrash(func(r interface{}) {
            err = toPanicError(r)
        })

        startTime := timex.Now()
        defer func() {
            duration := timex.Since(startTime)
            metrics.Add(stat.Task{
                Duration: duration,
            })
            logDuration(ctx, info.FullMethod, req, duration)
        }()

        return handler(ctx, req)
    }
}
func logDuration(ctx context.Context, method string, req interface{}, duration time.Duration) {
    var addr string
    client, ok := peer.FromContext(ctx)
    if ok {
        addr = client.Addr.String()
    }
    content, err := json.Marshal(req)
    if err != nil {
        logx.WithContext(ctx).Errorf("%s - %s", addr, err.Error())
    } else if duration > serverSlowThreshold {
        logx.WithContext(ctx).WithDuration(duration).Slowf("[RPC] slowcall - %s - %s - %s",
            addr, method, string(content))
    } else {
        logx.WithContext(ctx).WithDuration(duration).Infof("%s - %s - %s", addr, method, string(content))
    }
}
When the message body is large, say 3MB, a failed call serializes the entire body and writes it to the log, which makes the log files enormous and hard to analyze.
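One possible mitigation is to bound how much of the request is allowed into a log line. The following is only a minimal sketch, not go-zero's implementation: the truncatingDurationInterceptor name, the truncate helper, the 1KB maxLoggedBody limit, and the use of fmt/log instead of logx are all assumptions made to keep the example self-contained. It shows the shape of a client-side interceptor that, on failure, logs only a capped prefix of the payload; the same capping idea would apply to the server-side logDuration, which currently marshals the whole request.

package main

import (
    "context"
    "fmt"
    "log"
    "time"

    "google.golang.org/grpc"
    "google.golang.org/grpc/credentials/insecure"
)

// maxLoggedBody caps how many bytes of a request may appear in a log line.
// The 1KB limit is an arbitrary choice for this sketch.
const maxLoggedBody = 1024

// truncate returns at most maxLoggedBody bytes of s, marking any cut-off.
func truncate(s string) string {
    if len(s) <= maxLoggedBody {
        return s
    }
    return s[:maxLoggedBody] + "...(truncated)"
}

// truncatingDurationInterceptor is a hypothetical client interceptor that,
// unlike the default one above, never writes the full request body on failure.
func truncatingDurationInterceptor(ctx context.Context, method string, req, reply interface{},
    cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
    start := time.Now()
    err := invoker(ctx, method, req, reply, cc, opts...)
    if err != nil {
        // Only a bounded prefix of the request ends up in the log.
        fmt.Printf("fail - %s - %s - %v - %v\n",
            method, truncate(fmt.Sprintf("%v", req)), err, time.Since(start))
    }
    return err
}

func main() {
    // grpc.Dial is lazy, so registering the interceptor compiles and runs
    // without a live server; the target address is a placeholder.
    conn, err := grpc.Dial("localhost:8080",
        grpc.WithTransportCredentials(insecure.NewCredentials()),
        grpc.WithUnaryInterceptor(truncatingDurationInterceptor))
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close()
}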