Commit 63d1ed45 authored by 张鹏程

Collection features (催收特征)

parent 1ba38314
......@@ -456,7 +456,7 @@ background-color:yellow;
TestResult = unittest.TestResult
from testcase import test_trans_v2_read as tt
from features import test_trans_v2_read as tt
class _TestResult(TestResult):
# note: _TestResult is a pure representation of results.
# It lacks the output and reporting ability compared to unittest._TextTestResult.
......
{
"cells": [],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 2
}
This source diff could not be displayed because it is too large.
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"columns = pd.read_excel('/Users/zhangpengcheng/Documents/量化派代码管理/feature-api-test/data/催收模型特征_v1.xlsx',sheet_name='字段对应关系')"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"scrolled": true
},
"outputs": [
{
"data": {
"text/plain": [
"{'age_account': '账龄',\n",
" 'applied_from': '申请渠道',\n",
" 'apply_cnt_all': '现金分期 申请次数',\n",
" 'contract_loan_amt': '现金贷合同金额',\n",
" 'decline_cnt_all': '用户现金贷额度申请被拒绝次数',\n",
" 'delq_amt_sum_r': '逾期总金额/总额度',\n",
" 'fstLoan_delq_day': '首次提现最大逾期天数',\n",
" 'ind_curr_max_sumDelqAmt_rct_3mon': '当前累计逾期金额是否为近3个月内最高',\n",
" 'ind_curr_max_sumDelqAmt_rct_6mon': '当前累计逾期金额是否为近6个月内最高',\n",
" 'lst2_delq_trend': '最近两期逾期趋势',\n",
" 'lstLoan_delq_day_max': '上次提现最大逾期天数',\n",
" 'lstLoan_fstTerm_delq_day': '上次提现首期逾期天数',\n",
" 'lstRepay_delq_day': '最后一次还款逾期天数',\n",
" 'max_delq_days': '历史最大逾期天数',\n",
" 'monthly_pmt': '每期应还本金、利息、服务费之和',\n",
" 'pmt_itv_ratio_trend': '到期期数中提前和正常还款的占比',\n",
" 'rct_pmt_days': '最近一次还款距今天数',\n",
" 'repay_amt_sum_rct_6mon': '近6个月贷款还款金额总和'}"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"dict(zip(columns['需求字段名'].tolist(),columns['字段含义'].tolist()))"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"2018-12-17\n"
]
}
],
"source": [
"import datetime\n",
"from dateutil.relativedelta import relativedelta\n",
"\n",
"if __name__ == \"__main__\":\n",
" print(datetime.date.today() - relativedelta(months=+1))"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"2018-10-17\n"
]
}
],
"source": [
"print(datetime.date.today() )\n"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"ename": "TypeError",
"evalue": "must be str, not datetime.datetime",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-20-e563f762119c>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mdatetime\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdatetime\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstrptime\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdatetime\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdatetime\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnow\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'%Y-%m-%d 00:00:00'\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m#- relativedelta(months=+3)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;31mTypeError\u001b[0m: must be str, not datetime.datetime"
]
}
],
"source": [
"datetime.datetime.strptime(datetime.datetime.now(),'%Y-%m-%d 00:00:00') #- relativedelta(months=+3)"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [],
"source": [
"aa = datetime.datetime.now() - relativedelta(months=+3)"
]
},
{
"cell_type": "code",
"execution_count": 26,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'2018-10-17 00:00:00'"
]
},
"execution_count": 26,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"datetime.datetime.strftime(datetime.datetime.now() - relativedelta(months=+3),'%Y-%m-%d 00:00:00')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"{'age_account': {'name': '账龄', 'value': 466},\n",
" 'applied_from': {'name': '申请渠道', 'value': 217},\n",
" 'apply_cnt_all': {'name': '现金分期 申请次数'},\n",
" 'contract_loan_amt': {'name': '现金贷合同金额', 'value': 8000.0},\n",
" 'decline_cnt_all': {'name': '用户现金贷额度申请被拒绝次数'},\n",
" 'delq_amt_sum_r': {'name': '逾期总金额/总额度'},\n",
" 'fstLoan_delq_day': {'name': '首次提现最大逾期天数'},\n",
" 'ind_curr_max_sumDelqAmt_rct_3mon': {'name': '当前累计逾期金额是否为近3个月内最高'},\n",
" 'ind_curr_max_sumDelqAmt_rct_6mon': {'name': '当前累计逾期金额是否为近6个月内最高'},\n",
" 'lst2_delq_trend': {'name': '最近两期逾期趋势'},\n",
" 'lstLoan_delq_day_max': {'name': '上次提现最大逾期天数', 'value': 2},\n",
" 'lstLoan_fstTerm_delq_day': {'name': '上次提现首期逾期天数'},\n",
" 'lstRepay_delq_day': {'name': '最后一次还款逾期天数'},\n",
" 'max_delq_days': {'name': '历史最大逾期天数', 'value': 2},\n",
" 'monthly_pmt': {'name': '每期应还本金、利息、服务费之和'},\n",
" 'pmt_itv_ratio_trend': {'name': '到期期数中提前和正常还款的占比'},\n",
" 'rct_pmt_days': {'name': '最近一次还款距今天数', 'value': 14},\n",
" 'repay_amt_sum_rct_6mon': {'name': '近6个月贷款还款金额总和'}}"
]
},
{
"cell_type": "code",
"execution_count": 31,
"metadata": {},
"outputs": [],
"source": [
"aa = {}\n",
"age_account = {}"
]
},
{
"cell_type": "code",
"execution_count": 32,
"metadata": {},
"outputs": [
{
"ename": "KeyError",
"evalue": "'values'",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-32-e24208af8c51>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0maa\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msetdefault\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mage_account\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'values'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;31mKeyError\u001b[0m: 'values'"
]
}
],
"source": [
"aa.setdefault(age_account['values'],2)"
]
},
{
"cell_type": "code",
"execution_count": 30,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'aa': 2}"
]
},
"execution_count": 30,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"aa"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.4.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
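The scratch cells above are probing the datetime API: strptime parses a string into a datetime, so passing datetime.now() raises the TypeError shown, while strftime formats a datetime into a string. A minimal standalone sketch of the working direction, reusing the cell's format string (printed dates will differ by run):

import datetime
from dateutil.relativedelta import relativedelta

# strftime: datetime -> str, as in the last successful cell
three_months_ago = datetime.datetime.now() - relativedelta(months=+3)
as_text = datetime.datetime.strftime(three_months_ago, '%Y-%m-%d 00:00:00')
print(as_text)   # e.g. '2018-10-17 00:00:00'

# strptime goes the other way: str -> datetime (feeding it a datetime object is what raised the TypeError above)
print(datetime.datetime.strptime(as_text, '%Y-%m-%d %H:%M:%S'))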
......@@ -623,7 +623,7 @@
"cell_type": "code",
"execution_count": 20,
"metadata": {
"scrolled": true
"scrolled": false
},
"outputs": [
{
......@@ -9,3 +9,7 @@ user = lkbadmin_feature_r
password = uij89knb762er4cgs67
port = 33403
db = lkbadmin
[path]
data_path = /data/
report_path = /report/
\ No newline at end of file
......@@ -2,10 +2,11 @@ import os
FILE_PATH = os.path.realpath(os.curdir)
FEATURE_HOST = 'http://172.20.1.131:23010'
FEATURE_HOST_MY = 'http://localhost:23010'
FEATURE_HOST_MY = 'http://localhost:23010'
url_calc_features = '/calc/features'
url_trans_v2_read = '/features/trans_v2/read'
url_new_audit = '/features/new_audit'
url_complex_order = '/features/complex_order'
url_collection = '/features/collection'
url_collection = '/features/loanPostFeatures'
File added
# -*- coding:utf-8 -*-
from service.Sql_server import *
from collections import defaultdict
from dateutil.relativedelta import relativedelta
import datetime
def calc_features(loanId='',termNo='',user_id=None,orderId=None,apply_time=None,mapping =None):
if user_id == '':
return None
_result = defaultdict(dict)
for key,values in mapping.items():
_result[str(key)]["name"] = values
# user_apply_refuse_cnt: number of rejected applications (one-order-one-audit loan rejections plus revolving credit-line application rejections)
_df_loan_refuse_cnt_old = get_sql_loan_refuse_cnt_old(user_id)
_df_loan_refuse_cnt_new = get_sql_loan_refuse_cnt_new(user_id)
user_apply_refuse_cnt = _df_loan_refuse_cnt_old['count'].values[0] + _df_loan_refuse_cnt_new['count'].values[0]
_result['user_apply_refuse_cnt']['value'] = user_apply_refuse_cnt
return dict(_result)
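For orientation, calc_features returns the nested {'feature_key': {'name': ..., 'value': ...}} shape shown in the notebook output above. A minimal sketch of the assembly pattern in isolation (the mapping entry and counts are illustrative; no database access):

from collections import defaultdict

mapping = {'user_apply_refuse_cnt': '用户申请拒绝次数'}   # illustrative key/label pair
_result = defaultdict(dict)
for key, values in mapping.items():
    _result[str(key)]['name'] = values
_result['user_apply_refuse_cnt']['value'] = 2 + 1        # old-flow count + new-flow count, illustrative
print(dict(_result))   # {'user_apply_refuse_cnt': {'name': '用户申请拒绝次数', 'value': 3}}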
# -*- coding:utf-8 -*-
from functools import reduce
from datetime import datetime
from collections import defaultdict
from service.Sql_server import *
def complexOrder(user_id):
_result = defaultdict(dict)
columns = {'current_loan_0duedays_rate': '在贷逾期0天以上的占比',
'current_loan_5duedays_rate': '在贷逾期5天以上的占比',
'current_loan_apply_cnt': '在贷申请次数',
'current_loan_avg_repay': '在贷平均每期还款金额',
'current_loan_avg_repaypp': '在贷平均每期还款本金',
'current_loan_max_duedays': '在贷订单的最大的逾期天数',
'current_loan_max_duepp': '在贷订单的最大的逾期本金',
'current_loan_max_repay': '在贷最大的应还金额',
'current_loan_max_repaypp': '在贷最大的应还本金',
'current_loan_max_term': '当前在贷最大期数',
'current_loan_min_repay': '在贷最小的应还金额',
'current_loan_min_repaypp': '在贷最小的应还本金',
'current_loan_order_nums': '在贷订单数',
'current_loan_orderdue_nums': '在贷订单逾期订单数',
'current_loan_orderdue_rate': '在贷订单逾期订单占比',
'current_loan_refuse_cnt': '在贷拒绝次数',
'current_loan_refuse_rate': '在贷拒绝率',
'current_loan_repay_rate': '应还金额在贷率',
'current_loan_repaypp_rate': '应还本金在贷率',
'current_loan_sum_repay': '在贷应还总金额',
'current_loan_sum_repaypp': '在贷应还总本金',
'current_loan_term_nums': '当前在贷总期数',
'current_loan_term_rate': '在贷期数占比',
'days_from_lastdue': '最近一次逾期的距今的天数',
'days_from_lastrepaid': '最近一次的还款时间'}
def complexOrder(user_id,columns):
_result = defaultdict(dict)
df = get_sql_user_in_loan(user_id)
for key,values in columns.items():
_result[key]['name'] = values
......
# -*- coding:utf-8 -*-
import tornado.web
from service.Mapping import Mapping
class BaseHandler(tornado.web.RequestHandler):
# initialize the shared field mapping
mapping = Mapping()
# authentication
def prepare(self):
self.set_header('Content-Type', "application/json; charset=UTF-8")
......
# -*- coding:utf-8 -*-
import time
from handler.BaseHandler import BaseHandler
from testcase import test_collection
from features import feature_collection
from utils import JsonUtil
from service.Sql_server import *
import json
......@@ -11,14 +11,24 @@ class Collection(BaseHandler):
_business_type = self.get_argument('business_type', default=1)
_uuid = self.get_argument('user_uuid', default=None)
_orderId = self.get_argument('order_id', default=None)
_loanId = self.get_argument('loanId', default=None)
_loanId = self.get_argument('loan_id', default=None)
_termNo = self.get_argument('term_no',default=None)
_user_id = int(get_sql_user_id(_uuid))
_result = test_collection.collection(_loanId,_termNo,_user_id,_orderId)
_result = feature_collection.collection(_loanId,_termNo,_user_id,_orderId,
self.mapping.Mapping_Collection)
for key,value in self.mapping.Mapping_Collection_rename.items():
if key in _result.keys():
_result[value] = _result.pop(key)
self.write(JsonUtil.build_json_feature(businessType=int(_business_type), orderId=_orderId, loanId=_loanId,
user_uuid = _uuid,user_id=_user_id,_term_no = _termNo,
if 'code' in _result.keys():
self.write(JsonUtil.build_json_feature(businessType=int(_business_type), orderId=_orderId, loanId=int(_loanId),
userUuid = _uuid,user_id=_user_id,term_no = int(_termNo),
message = _result['massage'],  # upstream feature dict spells the key 'massage'
code=JsonUtil.Constants.Code_Error_Value))
else:
self.write(JsonUtil.build_json_feature(businessType=int(_business_type), orderId=_orderId, loanId=int(_loanId),
userUuid = _uuid,user_id=_user_id,term_no = int(_termNo),
features=_result, flag=True,code=JsonUtil.Constants.Code_Success))
self.flush()
......
# -*- coding:utf-8 -*-
import time
from handler.BaseHandler import BaseHandler
from testcase import test_complex_order
from features import feature_complex_order
from utils import JsonUtil
from service.Sql_server import *
......@@ -11,12 +11,14 @@ class ComplexOrder(BaseHandler): # 多订单接口
_business_type = self.get_argument('business_type', default=1)
_uuid = self.get_argument('user_uuid', default=None)
_orderId = self.get_argument('order_id', default=None)
_loanId = self.get_argument('loanId', default=None)
_loanId = self.get_argument('loan_id', default=None)
_user_id = int(get_sql_user_id(_uuid))
_result = test_complex_order.complexOrder(_user_id)
_result = feature_complex_order.complexOrder(_user_id,self.mapping.Mapping_Complex_order)
for key,value in self.mapping.Mapping_Complex_order_rename.items():
_result[value] = _result.pop(key)
self.write(JsonUtil.build_json_feature(businessType=int(_business_type), orderId=_orderId, loanId=_loanId,user_uuid = _uuid,user_id=_user_id,
self.write(JsonUtil.build_json_feature(businessType=int(_business_type), orderId=_orderId, loanId=_loanId,userUuid = _uuid,user_id=_user_id,
features=_result, flag=True,code=JsonUtil.Constants.Code_Success))
self.flush()
......
# -*- coding:utf-8 -*-
import time
from handler.BaseHandler import BaseHandler
from features import feature_calc_features
from utils import JsonUtil
from service.Sql_server import *
import json
class Calc_Features(BaseHandler):
def get(self):
try:
_business_type = self.get_argument('business_type', default=1)
_uuid = self.get_argument('user_uuid', default=None)
_orderId = self.get_argument('order_id', default=None)
_loanId = self.get_argument('loan_id', default=None)
_termNo = self.get_argument('term_no', default=None)
_apply_time = self.get_argument('apply_time', default=None)
_user_id = int(get_sql_user_id(_uuid))
_result = feature_calc_features.calc_features(_loanId, _termNo, _user_id, _orderId, _apply_time,
self.mapping.Mapping_Basics)
for key,value in self.mapping.Mapping_Basics_rename.items():
_result[value] = _result.pop(key)
self.write(JsonUtil.build_json_feature(businessType=int(_business_type), orderId=_orderId, loanId=_loanId,
userUuid = _uuid,user_id=_user_id,term_no = _termNo,
features=_result, flag=True,code=JsonUtil.Constants.Code_Success))
self.flush()
self.finish()
except ValueError as e:
# == caused by a wrong data type or a missing field
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Param_Error, JsonUtil.Constants.Msg_Param_Error))
self.flush()
except Exception as e:
# == exception raised, query failed
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Error, JsonUtil.Constants.Msg_Error))
self.flush()
pass
\ No newline at end of file
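A hedged sketch of exercising this handler locally with the query parameters it reads; the uuid, loan id, and timestamp are illustrative test values, while FEATURE_HOST_MY and url_calc_features come from config/settings.py:

import requests
from config import settings

params = {
    'business_type': 1,
    'user_uuid': 'fbd562f3-4a25-4cdf-9611-358385b760f3',  # illustrative uuid
    'loan_id': 102394628,                                  # illustrative loan id
    'term_no': 3,
    'apply_time': '2018-12-01 00:00:00',                   # illustrative timestamp
}
resp = requests.get(settings.FEATURE_HOST_MY + settings.url_calc_features, params=params)
print(resp.json())   # {'code': 200, 'data': {'subInfo': {...}, 'features': {...}}, ...}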
......@@ -609,3 +609,5 @@ def last_loan_overdue_days(df, uuid):
df_new['last_loan_overdue_days'] = df_new['last_loan_overdue_days'].fillna(0)  # assumption: treat missing overdue days as 0; bare fillna() raises a ValueError
print(df_new['last_loan_overdue_days'].values)
......@@ -3,40 +3,34 @@ from collections import defaultdict
import config.read_properties as cf
def get_response_http(host,url, params = None,file_path = None, method = 'GET',cookies = None):
def get_response_http(host,url, params = None,file_path = None, method = 'GET',cookies = None,headers=None):
_init = http(ip=host)
if headers:
_init.set_hearder(headers)
_request = _init.request_Met(url=url,params=params,method=method,cookies=cookies)
_response = _init.getJson(_request)
return _response
if _request.status_code == 200:
_response = _init.getJson(_request)
return _response
def get_response_feature(host,url,params = None,code = None,headers=None,file_path = None, method = 'GET',cookies = None):
_init = http(ip=host)
_request = _init.request_Met(url=url,params=params,method=method,cookies=cookies,headers = headers)
_response = _init.getJson(_request)
return _response
# def get_response_features(host,url,params = None,code = None,headers=None,file_path = None, method = 'GET',cookies = None,semlock = None):
#
# dict_assert = defaultdict(str)
# headers = {
# 'rc_auth_key':'rc_developer',
# 'rc_auth_secret':'rc_secret'
# }
# _init = http(ip=host)
# _request = _init.request_Met(url=url,params=params,method=method,cookies=cookies,headers = headers)
#
# url ='http://172.20.7.12:23010/calc/features'
#
# if _request.status_code == 200 and _request.json()['code'] == 200:
# _response = _init.getJson(_request)
# for key in _response['data']['features']:
# dict_assert[key] = r_json['data']['features'][key]['value']
# # print('-----------------')
#
# if semlock:
# semlock.release()
#
# return dict(dict_assert)
def get_json_feature(host,url,params = None,code = None,headers=None,method='GET',semlock=None):
""" 访问特征,返回json """
headers = {
'rc_auth_key':'rc_developer',
'rc_auth_secret':'rc_secret'
}
_dict = {}
try:
_response = get_response_http(host,url,params=params,method=method,headers = headers)
if _response['code'] == 200:
for key,value in _response.items():
_dict[key] = value
except Exception as e:
print(e.args)
finally:
if semlock:
semlock.release()
if _dict:
return _dict
# if __name__ == '__main__':
# -*- coding:utf-8 -*-
from config import read_properties
from config import settings
import pandas as pd
class Mapping():
def __init__(self):
print('Initializing Mapping ......')
_dataPath = read_properties.get_config().get_by_name('path','data_path')
self._reportPath = read_properties.get_config().get_by_name('path','report_path')
_columns_Basics = pd.read_excel(settings.FILE_PATH+_dataPath+'feature_基础特征.xlsx',sheet_name='字段对应关系')
self._columns_Basics = _columns_Basics.loc[_columns_Basics['是否测试'] == 1]
self.Mapping_Basics = dict(zip(self._columns_Basics['需求字段名'].tolist(),
self._columns_Basics['字段含义'].tolist()))
self.Mapping_Basics_rename = dict(zip(self._columns_Basics['需求字段名'].tolist(),
self._columns_Basics['特征平台特征字段名称'].tolist()))
_columns_Complex_order = pd.read_excel(settings.FILE_PATH+_dataPath+'多订单的新特征设计文档.xlsx',sheet_name='字段对应关系')
self._columns_Complex_order = _columns_Complex_order.loc[_columns_Complex_order['是否测试'] == 1]
self.Mapping_Complex_order = dict(zip(self._columns_Complex_order['需求字段名'].tolist(),
self._columns_Complex_order['字段含义'].tolist()))
self.Mapping_Complex_order_rename = dict(zip(self._columns_Complex_order['需求字段名'].tolist(),
self._columns_Complex_order['特征平台特征字段名称'].tolist()))
_columns_Collection = pd.read_excel(settings.FILE_PATH+_dataPath+'催收模型特征_v1.xlsx',sheet_name='字段对应关系')
self._columns_Collection = _columns_Collection.loc[_columns_Collection['是否测试'] == 1]
self.Mapping_Collection = dict(zip(self._columns_Collection['需求字段名'].tolist(),
self._columns_Collection['字段含义'].tolist()))
self.Mapping_Collection_rename = dict(zip(self._columns_Collection['需求字段名'].tolist(),
self._columns_Collection['特征平台特征字段名称'].tolist()))
print('Mapping initialization complete ......')
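Mapping_Collection is the same dict(zip(需求字段名, 字段含义)) construction the notebook above runs against 催收模型特征_v1.xlsx, and Mapping_Collection_rename maps the same keys to the platform names in the 特征平台特征字段名称 column. A minimal sketch of how the handlers apply the rename (the platform field name is an assumed placeholder):

result = {'age_account': {'name': '账龄', 'value': 466}}   # shape taken from the notebook output
rename = {'age_account': 'ageAccount'}                     # assumed platform field name, not from the spec
for key, value in rename.items():
    if key in result.keys():
        result[value] = result.pop(key)
print(result)   # {'ageAccount': {'name': '账龄', 'value': 466}}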
......@@ -56,8 +56,14 @@ def get_sql_user_loan_all(user_id):
t1.*,
if(t1.passdue_day > 0,1,0) as is_overdue
from (
select loan.user_id,loan.id as loan_id,loan.progress,fest.contract_loan_amount
,case when sub.channel_id is null then loan.created_from else sub.channel_id end as applied_from
select
IFNULL(aqr.order_no ,'') order_no
,loan.user_id,loan.id as loan_id,loan.progress,fest.contract_loan_amount
,case when loan.created_at >='2018-05-08' then aqr.apply_from
when loan.created_at < '2018-05-08' then loan.created_from
else null
end as applied_from
,if (sub.created_at is not null, sub.created_at ,loan.created_at) apply_time
,plan.id as plan_id,plan.term_no, plan.repayment_status, DATE_ADD(plan.deadline, INTERVAL 1 second) as deadline
,plan.repaid_at,plan.current_repayment,plan.required_repayment,record.principal,record.interest,record.service_fee
......@@ -71,11 +77,15 @@ def get_sql_user_loan_all(user_id):
left join loan_submit_info as sub on sub.loan_id = loan.id
left join repay as plan on plan.loan_application_history_id = loan.id
left join repayment_record as record on record.repayment_plan_id = plan.id
left join loan_account_ext as lae on lae.loan_id = loan.id
left join apply_quota_record as aqr on aqr.order_no = lae.order_no
where
loan.user_id = {0}
and fest.transaction_status in (2,5)
order by loan.id,plan.id
) as t1
"""
# -- ,case when sub.channel_id is null then loan.created_from else sub.channel_id end as applied_from
_df = pd.read_sql(sql=_sql.format(user_id),con=con_tuomin_xyqb)
return _df
......@@ -85,27 +95,51 @@ def get_sql_apply(user_id):
""" 用户申请次数 """
_sql = """
SELECT
user_id,
if(ls.created_at is not null, ls.created_at, l.created_at) apply_time,
if(l.progress in (6, 8, 19), 1, 0) apply_status
user_id,
if(ls.created_at is not null, ls.created_at, l.created_at) apply_time,
if(l.progress in (6, 8, 19), 1, 2) apply_status
FROM
loan_application_history l
LEFT JOIN loan_account_ext e
loan_application_history l
LEFT JOIN loan_account_ext e
ON e.loan_id = l.id
left join loan_submit_info ls on ls.loan_id = l.id
left join loan_submit_info ls on ls.loan_id = l.id
WHERE l.user_id = {0}
-- and l.business_type IN (0, 2, 4)
AND e.loan_id IS NULL
AND l.business_type IN (0, 2, 4)
AND e.loan_id IS NULL
union all
SELECT
user_id,
apply_time,
apply_status
user_id,
apply_time,
apply_status
FROM
apply_quota_record
apply_quota_record
WHERE user_id = {0}
order by apply_time;
"""
order by apply_time;
"""
_df = pd.read_sql(_sql.format(user_id),con_tuomin_xyqb)
return _df
def get_sql_loan_refuse_cnt_old(user_id):
# rejections in the one-order-one-audit loan flow
_sql = """
SELECT COUNT(1) count FROM loan_application_history l LEFT JOIN loan_account_ext e ON e.loan_id = l.id
WHERE l.user_id = {0}
and l.business_type IN (0,2)
AND l.progress IN (6,8,19) AND e.loan_id IS NULL
"""
_df = pd.read_sql(_sql.format(user_id),con_tuomin_xyqb)
return _df
def get_sql_loan_refuse_cnt_new(user_id,apply_time=None):
# rejections of revolving credit-line applications
_sql = """
SELECT COUNT(1) count FROM apply_quota_record
WHERE user_id = {0}
AND apply_status = 1"""
if apply_time:
_sql = _sql + """ AND apply_time <= '{1}'"""
_df = pd.read_sql(_sql.format(user_id,apply_time),con_tuomin_xyqb)
return _df
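When apply_time is supplied, the extra clause is appended before formatting; a quick sketch of the assembled statement (user id and timestamp are illustrative):

base_sql = """
SELECT COUNT(1) count FROM apply_quota_record
WHERE user_id = {0}
AND apply_status = 1"""
sql = base_sql + """ AND apply_time <= '{1}'"""   # leading space keeps the SQL valid
print(sql.format(10086, '2018-12-01 00:00:00'))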
......@@ -10,6 +10,7 @@ import tornado.log
import logging
import tornado.options
# from handler import LoanDueRateHandler
from handler import Feature_Calc_Features # basic features (calc/features)
from handler import TransV2Read_Handler
from handler import NewAudit_Handler
from handler import ComplexOrder_Handler # multi-order features
......@@ -26,6 +27,7 @@ class LogFormatter(tornado.log.LogFormatter):
def apps():
return Application([
(URL.url_calc_features,Feature_Calc_Features.Calc_Features),
(URL.url_trans_v2_read, TransV2Read_Handler.Trans_V2_Read),
(URL.url_new_audit,NewAudit_Handler.New_Audit),
(URL.url_complex_order,ComplexOrder_Handler.ComplexOrder),
......
from service.Auto_Unittest import Test
from testcase.test_trans_v2_read import *
from features.test_trans_v2_read import *
Test.set_instance_method(test_trans_v2_read)
......
import pandas as pd
from config import concat_sql
# from features import feature_collection
from utils import tools
from config import settings
from service import Http_Requests
from config import read_properties
import threading
from service.Mapping import Mapping
class Test_Base(Mapping):
def __init__(self):
self.settings = settings
pass
def get_Threads_response(self,func,ip,url,data,maxconnections=30):
maxconnections = maxconnections # maximum number of concurrent requests
semlock = threading.BoundedSemaphore(maxconnections)
_list_Thread = []
_return = []
for i in range(len(data)):
semlock.acquire()
# pass semlock as a keyword so it reaches the semlock parameter (positionally it would land in `code` and never be released)
t = tools.MyThread(target=eval(func),args=(ip,url,data[i]),kwargs={'semlock':semlock})
_list_Thread.append(t)
t.start()
for i in range(len(_list_Thread)):
_list_Thread[i].join()
_return.append(_list_Thread[i].get_return())
_df = pd.DataFrame(_return)
return _df
def get_pd_melt(self,df):
pass
if __name__ == '__main__':
Test_Base()
\ No newline at end of file
# -*- coding:utf-8 -*-
import pandas as pd
from config import concat_sql
from features import feature_collection
from utils import tools
from service import Http_Requests
from test_case.test_Base import Test_Base
from config import settings
class ComplexOrder(Test_Base):
def __init__(self):
Test_Base.__init__(self)
params = [{
'user_uuid':'7713913adfaa4550b6053e69ddbe9c34'
}]
_df = self.get_Threads_response('Http_Requests.get_json_feature',
settings.FEATURE_HOST_MY,
settings.url_complex_order,
params)
def _get_Msg(self):
pass
# Http_Requests.get_json_feature()
if __name__ == '__main__':
c = ComplexOrder()
# params = {
# 'loanId':102394628,
# 'term_no':3,
# 'user_uuid':'fbd562f3-4a25-4cdf-9611-358385b760f3'
# }
# dd = get_json_feature('http://localhost:23010',
# '/features/collection',
# params=params
# )
# print(dd)
\ No newline at end of file
# -*- coding:utf-8 -*-
import pandas as pd
from config import concat_sql
from features import feature_collection
from utils import tools
from service import Http_Requests
from test_case.test_Base import Test_Base
from config import settings
class Collection(Test_Base):
def __init__(self):
Test_Base.__init__(self)
params = [{
'loanId':102394628,
'term_no':3,
'user_uuid':'fbd562f3-4a25-4cdf-9611-358385b760f3'
}]
_df = self.get_Threads_response('Http_Requests.get_json_feature',
settings.FEATURE_HOST_MY,
settings.url_collection,
params)
def _get_Msg(self):
pass
# Http_Requests.get_json_feature()
if __name__ == '__main__':
c = Collection()
# params = {
# 'loanId':102394628,
# 'term_no':3,
# 'user_uuid':'fbd562f3-4a25-4cdf-9611-358385b760f3'
# }
# dd = get_json_feature('http://localhost:23010',
# '/features/collection',
# params=params
# )
# print(dd)
\ No newline at end of file
def test_new_audit():
pass
\ No newline at end of file
......@@ -9,16 +9,16 @@ import json
## constant definitions
## error codes used in the JSON responses
class Constants:
Code_Success = '200'
Code_Success = 200
Msg_Success = 'SUCCESS'
Code_Fail = 1
Msg_Fail = '失败'
Code_Param_Error = '0002'
Code_Param_Error = -1
Msg_Param_Error = '参数错误'
Code_LoginToken_Auth_Error = '0003'
Code_LoginToken_Auth_Error = -2
Msg_LoginToken_Auth_Error = 'token登陆认证错误'
Code_Query_Fail = 4
......@@ -27,15 +27,18 @@ class Constants:
# Code_Query_Empty = 5
# Msg_Query_Empty = '数据为空'
Code_Error = '0006'
Code_Error = -3
Msg_Error = '数据异常'
Code_Zero_Error = '0007'
Code_Zero_Error = -4
Msg_Zero_Error = 'PSI计算异常,分母为0'
Code_Error_Value = 506
Msg_Error_Value = '没找到指定参数的返回值'
def build_json_feature(features=None,code=None,**kwargs):
def build_json_feature(features=None,code=None,message=None,**kwargs):
if features == None:
features = []
......@@ -44,7 +47,7 @@ def build_json_feature(features=None,code=None,**kwargs):
for key,value in kwargs.items():
body[str(key)] = value
ddata = {'code':code,'data':dict(subInfo = body,features=eval(repr(features)))}
ddata = {'code':code,'message':message,'data':dict(subInfo = body,features=eval(repr(features)))}
return json.dumps(ddata,ensure_ascii=False)
......
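For reference, a hedged sketch of the envelope build_json_feature assembles; the field values are illustrative and only the key layout follows the function above:

from utils import JsonUtil

payload = JsonUtil.build_json_feature(
    features={'max_delq_days': {'name': '历史最大逾期天数', 'value': 2}},  # illustrative feature
    code=JsonUtil.Constants.Code_Success,
    businessType=1,
    orderId=None,
    loanId=102394628,                                      # illustrative ids
    userUuid='fbd562f3-4a25-4cdf-9611-358385b760f3',
    user_id=1,
    term_no=3)
print(payload)   # {"code": 200, "message": null, "data": {"subInfo": {...}, "features": {...}}}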
......@@ -5,7 +5,7 @@ import sys,datetime,time
import threading
import paramiko
import inspect
from collections import defaultdict
import requests
def div_list(ls,n):
......@@ -143,10 +143,6 @@ def get_message_id(start,cnt,index,sql,sql_con):
df = pd.concat(df_list,axis = 0)
return df
def get_message(cnt,index,sql,sql_con):
"""
cnt : total number of records to query
......@@ -233,9 +229,6 @@ def parse_json_new(x):
return newJson
def str_connect(x,y):
return str(x)+'_'+str(y)
......@@ -285,43 +278,48 @@ def bytesToStr(x):
from collections import defaultdict
reDict = defaultdict(int) # error counters returned for the test-report charts
def assert_diff(x,y,col_name,code=None):
global reDict
try:
if abs(float(x) - float(y)) > 1/100000:
if code:
reDict['error'][code]+=1
reDict[code]['error']+=1
reDict[code]['sum']+=1
return False
else:
reDict[col_name]+=1
return col_name+' : '+str(x)+'_'+str(y) + '\n'
else:
if code:
reDict['error'][code]+=0
reDict[code]['pass']+=1
reDict[code]['sum']+=1
return True
except Exception as e:
pass
try:
if str(x) != str(y):
if code:
reDict['error'][code]+=1
reDict[code]['error']+=1
reDict[code]['sum']+=1
return False
else:
reDict[col_name]+=1
return col_name+' : '+str(x)+'_'+str(y) + '\n'
else:
if code:
reDict['error'][code]+=0
reDict[code]['pass']+=1
reDict[code]['sum']+=1
return True
except:
pass
def assert_columns(df,columns,not_columns=[],is_AssertFilter=True,is_reDict = False,is_DelColumnsXY = True,code=None):
""" 校验Dataframe columns 的 x 和 y 列
not_columns : 不参与校验的列
......@@ -331,38 +329,39 @@ def assert_columns(df,columns,not_columns=[],is_AssertFilter=True,is_reDict = Fa
code : column used to group the error statistics
"""
df['assert'] = True
df['测试结论'] = False
global reDict
reDict = defaultdict(int)
if len(df)>0:
if code:
reDict['sum'] = len(df)
code = df.loc[df.duplicated('code',keep='first')==False]['code'].tolist()
for i in code:
reDict[i]={}
reDict[i]['sum'] = 0
reDict[i]['error'] = 0
reDict[i]['pass'] = 0
for cls in columns:
if cls not in ['assert']+not_columns :
if cls not in ['测试结论'] + not_columns :
try:
if code:
reDict['error'] = defaultdict(int)
df['assert'] = df.apply(lambda x : assert_diff(x[cls+'_x'],x[cls+'_y'],cls,x['code']),axis=1).astype(str)
df['测试结论'] = df.apply(lambda x : assert_diff(x[cls+'_x'],x[cls+'_y'],cls,x['code']),axis=1).astype(str)
else:
df['assert'] = df.apply(lambda x : assert_diff(x[cls+'_x'],x[cls+'_y'],cls),axis=1).astype(str)
df['测试结论'] = df.apply(lambda x : assert_diff(x[cls+'_x'],x[cls+'_y'],cls),axis=1).astype(str)
df[cls] = df.apply(lambda x : str_connect(x[cls+'_x'],x[cls+'_y']),axis=1)
if is_DelColumnsXY:
del df[cls+'_x']
del df[cls+'_y']
except:
pass
if is_AssertFilter:
df = df.loc[~(df['assert'] == 'True') | ((df['assert'] == ''))]
df = df.loc[df['测试结论'] != 'True']  # keep rows where any comparison failed (values were cast to str)
if is_reDict:
return df,dict(reDict)
else:return df
def matching_data(df,key):
"""
Feature record matching rules
......@@ -379,8 +378,6 @@ def matching_data(df,key):
# second-round filter: keep the rows closest in time
df_g_time = df.groupby(key).agg({'seconds':'min'}).reset_index()
df_time = pd.merge(df_g_time,df,how='left',on=key+['seconds'])
# print(df_time.loc[df_time['uuid'] == 'ed961be0-7fd2-4fca-b1d1-c4cc8e24a7bc'])
......@@ -456,7 +453,6 @@ def return_LenValue(value,lens,types):
return value
else:return ''
class MyThread(threading.Thread):
def __init__(self, group=None, target=None, name=None, args=(), kwargs=None, *, daemon=None):
threading.Thread.__init__(self, group, target, name, args, kwargs, daemon=daemon)
......@@ -474,4 +470,4 @@ class MyThread(threading.Thread):
def join(self):
threading.Thread.join(self)
return self._return
return self._return
\ No newline at end of file
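MyThread subclasses threading.Thread so a worker's return value can be collected after it finishes, which is how test_Base gathers per-request results. A minimal usage sketch, assuming the class's run() stores the target's result in self._return as its join() implies (the worker function is illustrative):

from utils.tools import MyThread

def work(n):
    return n * n   # illustrative worker

t = MyThread(target=work, args=(7,))
t.start()
print(t.join())   # 49 -- the overridden join() returns the worker's result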