Commit 779585d7 authored by 张鹏程

Commit the full codebase

model-data-test
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
<resourceExtensions />
<wildcardResourcePatterns>
<entry name="!?*.java" />
<entry name="!?*.form" />
<entry name="!?*.class" />
<entry name="!?*.groovy" />
<entry name="!?*.scala" />
<entry name="!?*.flex" />
<entry name="!?*.kt" />
<entry name="!?*.clj" />
<entry name="!?*.aj" />
</wildcardResourcePatterns>
<annotationProcessing>
<profile default="true" name="Default" enabled="false">
<processorPath useClasspath="true" />
</profile>
</annotationProcessing>
<bytecodeTargetLevel target="1.8" />
</component>
</project>
\ No newline at end of file
<component name="CopyrightManager">
<settings default="" />
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Encoding">
<file url="PROJECT" charset="UTF-8" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaScriptSettings">
<option name="languageLevel" value="ES6" />
</component>
<component name="MavenImportPreferences">
<option name="generalSettings">
<MavenGeneralSettings>
<option name="localRepository" value="/usr/local/maven/loacalRepository" />
<option name="mavenHome" value="/usr/local/maven/apache-maven-3.5.2" />
<option name="userSettingsFile" value="/usr/local/maven/apache-maven-3.5.2/conf/settings.xml" />
</MavenGeneralSettings>
</option>
<option name="importingSettings">
<MavenImportingSettings>
<option name="generatedSourcesFolder" value="GENERATED_SOURCE_FOLDER" />
<option name="importAutomatically" value="true" />
</MavenImportingSettings>
</option>
</component>
<component name="ProjectInspectionProfilesVisibleTreeState">
<entry key="Project Default">
<profile-state>
<expanded-state>
<State>
<id />
</State>
<State>
<id>Abstraction issuesJava</id>
</State>
<State>
<id>Android</id>
</State>
<State>
<id>Android Lint</id>
</State>
<State>
<id>Application Server Specific Inspections</id>
</State>
<State>
<id>Bitwise operation issuesJava</id>
</State>
<State>
<id>Bitwise operation issuesJavaScript</id>
</State>
<State>
<id>CDI(Contexts and Dependency Injection) issues</id>
</State>
<State>
<id>Class metricsJava</id>
</State>
<State>
<id>Control FlowGroovy</id>
</State>
<State>
<id>Control flow issuesJava</id>
</State>
<State>
<id>Control flow issuesJavaScript</id>
</State>
<State>
<id>Error handlingJava</id>
</State>
<State>
<id>Faces Model</id>
</State>
<State>
<id>GSPGrailsGroovy</id>
</State>
<State>
<id>General</id>
</State>
<State>
<id>GeneralJavaScript</id>
</State>
<State>
<id>Google App Engine (Python)</id>
</State>
<State>
<id>GrailsGroovy</id>
</State>
<State>
<id>Groovy</id>
</State>
<State>
<id>JSP Inspections</id>
</State>
<State>
<id>Java</id>
</State>
<State>
<id>Java EE issues</id>
</State>
<State>
<id>JavaBeans issuesJava</id>
</State>
<State>
<id>JavaScript</id>
</State>
<State>
<id>JavaScript validity issuesJavaScript</id>
</State>
<State>
<id>Logging issuesJava</id>
</State>
<State>
<id>Maven</id>
</State>
<State>
<id>Numeric issuesJava</id>
</State>
<State>
<id>Performance issuesJava</id>
</State>
<State>
<id>Plugin DevKit</id>
</State>
<State>
<id>Portability issuesJava</id>
</State>
<State>
<id>Potentially confusing code constructsJavaScript</id>
</State>
<State>
<id>Probable bugsJava</id>
</State>
<State>
<id>Python</id>
</State>
<State>
<id>Resource management issuesJava</id>
</State>
<State>
<id>SQL</id>
</State>
<State>
<id>Threading issuesGroovy</id>
</State>
<State>
<id>Threading issuesJava</id>
</State>
<State>
<id>WebSocket issues</id>
</State>
<State>
<id>XPath</id>
</State>
<State>
<id>XSLT</id>
</State>
</expanded-state>
<selected-state>
<State>
<id>Android</id>
</State>
</selected-state>
</profile-state>
</entry>
</component>
<component name="ProjectLevelVcsManager" settingsEditedManually="false">
<OptionsSetting value="true" id="Add" />
<OptionsSetting value="true" id="Remove" />
<OptionsSetting value="true" id="Checkout" />
<OptionsSetting value="true" id="Update" />
<OptionsSetting value="true" id="Status" />
<OptionsSetting value="true" id="Edit" />
<ConfirmationsSetting value="0" id="Add" />
<ConfirmationsSetting value="0" id="Remove" />
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" default="false" assert-keyword="true" jdk-15="true" project-jdk-name="Python 3.6.10 (~/opt/anaconda3/envs/python36/bin/python3.6)" project-jdk-type="Python SDK" />
<component name="PyConsoleOptionsProvider">
<option name="myPythonConsoleState">
<console-settings sdk-home="$USER_HOME$/opt/anaconda3/envs/python36/bin/python3.6">
<option name="mySdkHome" value="$USER_HOME$/opt/anaconda3/envs/python36/bin/python3.6" />
<option name="myAddSourceRoots" value="true" />
</console-settings>
</option>
</component>
<component name="SvnConfiguration">
<configuration>$USER_HOME$/.subversion</configuration>
</component>
<component name="masterDetails">
<states>
<state key="GlobalLibrariesConfigurable.UI">
<settings>
<last-edited>Python 3.6.10 (~/opt/anaconda3/envs/python36/bin/python3.6) interpreter library</last-edited>
<splitter-proportions>
<option name="proportions">
<list>
<option value="0.2" />
</list>
</option>
</splitter-proportions>
</settings>
</state>
<state key="JdkListConfigurable.UI">
<settings>
<last-edited>Python 3.6.10 (~/opt/anaconda3/envs/python36/bin/python3.6)</last-edited>
<splitter-proportions>
<option name="proportions">
<list>
<option value="0.2" />
</list>
</option>
</splitter-proportions>
</settings>
</state>
<state key="ProjectJDKs.UI">
<settings>
<last-edited>1.8</last-edited>
<splitter-proportions>
<option name="proportions">
<list>
<option value="0.2" />
</list>
</option>
</splitter-proportions>
</settings>
</state>
<state key="ProjectLibrariesConfigurable.UI">
<settings>
<splitter-proportions>
<option name="proportions">
<list>
<option value="0.2" />
</list>
</option>
</splitter-proportions>
</settings>
</state>
<state key="ScopeChooserConfigurable.UI">
<settings>
<splitter-proportions>
<option name="proportions">
<list>
<option value="0.2" />
</list>
</option>
</splitter-proportions>
</settings>
</state>
</states>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/model-data-test.iml" filepath="$PROJECT_DIR$/.idea/model-data-test.iml" />
</modules>
</component>
</project>
\ No newline at end of file
This diff is collapsed.
[2020-03-12 19:01:28 process.py:fork_processes:128 INFO] Starting 4 processes
[2020-03-12 19:01:29 process.py:fork_processes:163 WARNING] child 1 (pid 55210) exited with status 1, restarting
[2020-03-12 19:01:29 process.py:fork_processes:163 WARNING] child 3 (pid 55212) exited with status 1, restarting
[2020-03-12 19:01:29 process.py:fork_processes:163 WARNING] child 2 (pid 55211) exited with status 1, restarting
[2020-03-12 19:01:29 process.py:fork_processes:163 WARNING] child 0 (pid 55209) exited with status 1, restarting
[2020-03-12 19:01:29 process.py:fork_processes:163 WARNING] child 2 (pid 55215) exited with status 1, restarting
[2020-03-12 19:01:29 process.py:fork_processes:163 WARNING] child 3 (pid 55214) exited with status 1, restarting
[2020-03-12 19:01:29 process.py:fork_processes:163 WARNING] child 0 (pid 55216) exited with status 1, restarting
[2020-03-12 19:01:29 process.py:fork_processes:163 WARNING] child 1 (pid 55213) exited with status 1, restarting
[2020-03-12 19:01:29 process.py:fork_processes:163 WARNING] child 3 (pid 55218) exited with status 1, restarting
[2020-03-12 19:01:29 process.py:fork_processes:163 WARNING] child 2 (pid 55217) exited with status 1, restarting
[2020-03-12 19:01:29 process.py:fork_processes:163 WARNING] child 1 (pid 55220) exited with status 1, restarting
[2020-03-12 19:01:29 process.py:fork_processes:163 WARNING] child 0 (pid 55219) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 3 (pid 55226) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 1 (pid 55229) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 2 (pid 55227) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 0 (pid 55230) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 3 (pid 55242) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 2 (pid 55244) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 1 (pid 55243) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 0 (pid 55245) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 3 (pid 55248) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 1 (pid 55250) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 0 (pid 55251) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 2 (pid 55249) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 0 (pid 55254) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 1 (pid 55253) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 2 (pid 55255) exited with status 1, restarting
[2020-03-12 19:01:30 process.py:fork_processes:163 WARNING] child 3 (pid 55252) exited with status 1, restarting
[2020-03-12 19:01:31 process.py:fork_processes:163 WARNING] child 1 (pid 55257) exited with status 1, restarting
[2020-03-12 19:01:31 process.py:fork_processes:163 WARNING] child 3 (pid 55259) exited with status 1, restarting
[2020-03-12 19:01:31 process.py:fork_processes:163 WARNING] child 2 (pid 55258) exited with status 1, restarting
[2020-03-12 19:01:31 process.py:fork_processes:163 WARNING] child 0 (pid 55256) exited with status 1, restarting
[2020-03-12 19:01:31 process.py:fork_processes:163 WARNING] child 3 (pid 55261) exited with status 1, restarting
[2020-03-12 19:01:31 process.py:fork_processes:163 WARNING] child 2 (pid 55262) exited with status 1, restarting
[2020-03-12 19:01:31 process.py:fork_processes:163 WARNING] child 0 (pid 55263) exited with status 1, restarting
[2020-03-12 19:01:31 process.py:fork_processes:163 WARNING] child 1 (pid 55260) exited with status 1, restarting
[2020-03-12 19:01:31 process.py:fork_processes:163 WARNING] child 3 (pid 55264) exited with status 1, restarting
[2020-03-12 19:01:31 process.py:fork_processes:163 WARNING] child 0 (pid 55266) exited with status 1, restarting
[2020-03-12 19:01:31 process.py:fork_processes:163 WARNING] child 1 (pid 55267) exited with status 1, restarting
[2020-03-12 19:01:31 process.py:fork_processes:163 WARNING] child 2 (pid 55265) exited with status 1, restarting
[2020-03-12 19:01:32 process.py:fork_processes:163 WARNING] child 0 (pid 55269) exited with status 1, restarting
[2020-03-12 19:01:32 process.py:fork_processes:163 WARNING] child 1 (pid 55270) exited with status 1, restarting
[2020-03-12 19:01:32 process.py:fork_processes:163 WARNING] child 2 (pid 55271) exited with status 1, restarting
[2020-03-12 19:01:32 process.py:fork_processes:163 WARNING] child 3 (pid 55268) exited with status 1, restarting
[2019-12-04 15:32:41 log.py:info:109 INFO] SHOW VARIABLES LIKE 'sql_mode'
[2019-12-04 15:32:41 log.py:info:109 INFO] {}
[2019-12-04 15:32:41 log.py:info:109 INFO] SELECT DATABASE()
[2019-12-04 15:32:41 log.py:info:109 INFO] {}
[2019-12-04 15:32:41 log.py:info:109 INFO] show collation where `Charset` = 'utf8' and `Collation` = 'utf8_bin'
[2019-12-04 15:32:41 log.py:info:109 INFO] {}
[2019-12-04 15:32:41 log.py:info:109 INFO] SELECT CAST('test plain returns' AS CHAR(60)) AS anon_1
[2019-12-04 15:32:41 log.py:info:109 INFO] {}
[2019-12-04 15:32:41 log.py:info:109 INFO] SELECT CAST('test unicode returns' AS CHAR(60)) AS anon_1
[2019-12-04 15:32:41 log.py:info:109 INFO] {}
[2019-12-04 15:32:41 log.py:info:109 INFO] SELECT CAST('test collated returns' AS CHAR CHARACTER SET utf8) COLLATE utf8_bin AS anon_1
[2019-12-04 15:32:41 log.py:info:109 INFO] {}
[2019-12-04 15:32:41 log.py:info:109 INFO] BEGIN (implicit)
[2019-12-04 15:32:41 log.py:info:109 INFO] SELECT feature_detail.id AS feature_detail_id, feature_detail.feature_name AS feature_detail_feature_name, feature_detail.feature_content AS feature_detail_feature_content, feature_detail.feature_type AS feature_detail_feature_type, feature_detail.feature_value AS feature_detail_feature_value, feature_detail.feature_gdp AS feature_detail_feature_gdp, feature_detail.data_type AS feature_detail_data_type, feature_detail.data_round AS feature_detail_data_round, feature_detail.is_active AS feature_detail_is_active, feature_detail.create_at AS feature_detail_create_at, feature_detail.update_at AS feature_detail_update_at
FROM feature_detail
WHERE feature_detail.feature_name = %(feature_name_1)s
[2019-12-04 15:32:41 log.py:info:109 INFO] {'feature_name_1': 'f10'}
[2019-12-04 15:32:41 web.py:log_request:2106 INFO] 200 GET /api/feature?feature_name=f10 (::1) 15.98ms
[2019-12-04 15:32:41 log.py:info:109 INFO] ROLLBACK
[2019-12-04 15:32:42 log.py:info:109 INFO] SHOW VARIABLES LIKE 'sql_mode'
[2019-12-04 15:32:42 log.py:info:109 INFO] {}
[2019-12-04 15:32:42 log.py:info:109 INFO] SELECT DATABASE()
[2019-12-04 15:32:42 log.py:info:109 INFO] {}
[2019-12-04 15:32:42 log.py:info:109 INFO] show collation where `Charset` = 'utf8' and `Collation` = 'utf8_bin'
[2019-12-04 15:32:42 log.py:info:109 INFO] {}
[2019-12-04 15:32:42 log.py:info:109 INFO] SELECT CAST('test plain returns' AS CHAR(60)) AS anon_1
[2019-12-04 15:32:42 log.py:info:109 INFO] {}
[2019-12-04 15:32:42 log.py:info:109 INFO] SELECT CAST('test unicode returns' AS CHAR(60)) AS anon_1
[2019-12-04 15:32:42 log.py:info:109 INFO] {}
[2019-12-04 15:32:42 log.py:info:109 INFO] SELECT CAST('test collated returns' AS CHAR CHARACTER SET utf8) COLLATE utf8_bin AS anon_1
[2019-12-04 15:32:42 log.py:info:109 INFO] {}
[2019-12-04 15:32:42 log.py:info:109 INFO] BEGIN (implicit)
[2019-12-04 15:32:42 log.py:info:109 INFO] SELECT feature_detail.id AS feature_detail_id, feature_detail.feature_name AS feature_detail_feature_name, feature_detail.feature_content AS feature_detail_feature_content, feature_detail.feature_type AS feature_detail_feature_type, feature_detail.feature_value AS feature_detail_feature_value, feature_detail.feature_gdp AS feature_detail_feature_gdp, feature_detail.data_type AS feature_detail_data_type, feature_detail.data_round AS feature_detail_data_round, feature_detail.is_active AS feature_detail_is_active, feature_detail.create_at AS feature_detail_create_at, feature_detail.update_at AS feature_detail_update_at
FROM feature_detail
WHERE feature_detail.feature_name = %(feature_name_1)s
[2019-12-04 15:32:42 log.py:info:109 INFO] {'feature_name_1': 'f10'}
[2019-12-04 15:32:42 web.py:log_request:2106 INFO] 200 GET /api/feature?feature_name=f10 (::1) 11.67ms
[2019-12-04 15:32:42 log.py:info:109 INFO] ROLLBACK
# -*- coding:utf-8 -*-
from sqlalchemy import create_engine
from model_data_test import settings
from model_data_api.model import tab_model
tab_model.db_base.metadata.drop_all(bind=create_engine(settings.DB_CONNECT_STRING['model_db']))
tab_model.db_base.metadata.create_all(bind=create_engine(settings.DB_CONNECT_STRING['model_db']))
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>
\ No newline at end of file
# -*- coding:utf-8 -*-
from model_data_api.handler.test_handler import UserstHandler
from model_data_api.handler.ModelsData_Handler import ModelsDataHandler
from model_data_api.handler.FeatureData_Handler import FeatureDataHandler
from model_data_api.handler.AssertData_Handler import AssertDataHandler
from model_data_api.handler.DataSet_Handler import DataSetHandler
from model_data_api.handler.DataSetDetail_Handler import DatadetailHandler
from model_data_api.handler.DataSetDetailRead_Handler import DatadetailreadHandler
from model_data_api.handler.Index_Handler import IndexHandler,IndexHandler_test,IndexHandler_test_new
from model_data_api.handler.UserLogin_Handler import UserLoginHandler
from model_data_api.handler.ModelsExec_Handler import ModelsExecHandler
from model_data_api.handler.UserTestData_Handler import UserTestDataHandler
from model_data_api.handler.Automatic_Handler import AutomaticHandler
import platform
separator = '\\' if platform.system() == 'Windows' else '/'
\ No newline at end of file
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from model_data_test import settings
def DB_Base():
engine = create_engine(settings.DB_CONNECT_STRING['model_db'],connect_args={"use_unicode":True,"charset":"utf8"})
Base = declarative_base(bind=engine)
return Base
\ No newline at end of file
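For orientation, here is a minimal sketch of how the declarative base returned by DB_Base() would be consumed when declaring a table. The import path assumes this function lives in model_data_api.config (as the later base_model import suggests), and the table/column names are illustrative, not the real definitions from tab_model.py.

# Sketch only: import path and table definition are assumptions for illustration.
import datetime
from sqlalchemy import Column, Integer, String, DateTime
from model_data_api.config import DB_Base

Base = DB_Base()  # declarative base already bound to the model_db engine

class TabExample(Base):
    __tablename__ = 'tab_example'
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(64), nullable=False)
    create_at = Column(DateTime, default=datetime.datetime.now)

# Because the base is bound to an engine, tables can be created without
# passing a bind explicitly:
# Base.metadata.create_all()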
# -*- coding:utf-8 -*-
from model_data_api.model.assertData_model import AssertDataManager
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.views import *
from utils import JsonUtil
import pandas as pd
import json
app = app()
@app.route('/api/assertdata')
class AssertDataHandler(BaseHandler):
def initialize(self):
self.session = AssertDataManager()
pass
def on_finish(self):
self.session.close()
def post(self,*args,**kwargs):
try :
data = []
if self.request.body:
data = json.loads(str(self.request.body, encoding = "utf-8"))
if data:
_name =data['data_basic']['assert_name']
if not self.session.find_single(_name):
_df = pd.DataFrame(data['data_detail'])
assert_type = _df.apply(lambda x : assert_value(x),axis=1)
msg = ','.join([i for i in assert_type.tolist() if i])
if msg:
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Param_Error,
msg=JsonUtil.Constants.Msg_Param_Error,
message = msg
))
else:
_df_detail = pd.DataFrame(data['data_detail'])
_df_detail.loc[_df_detail['data_type'].isnull(),'data_type'] = 0
if 'data_round' not in _df_detail.columns.tolist():
_df_detail['data_round'] = 0
else:
_df_detail.loc[_df_detail['data_round'].isnull(),'data_round'] = 0
_df_detail.loc[_df_detail['assert_value'].isnull(),'assert_value'] = None
_df_detail['data_type'] = _df_detail['data_type'].astype(int)
_df_detail['data_round'] = _df_detail['data_round'].astype(int)
data_detail = _df_detail.to_dict(orient='records')
self.session.create_obj(data['data_basic'],data_detail)
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Success,
msg=JsonUtil.Constants.Msg_Success,
message = "测试数据集创建成功"
))
else:
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Success,
msg=JsonUtil.Constants.Msg_Success,
message = "测试数据集已存在"
))
self.flush()
self.finish()
# if assert_name and assert_type:
# self.session.create_obj(assert_name=assert_name,assert_content=assert_content,
# assert_value=assert_value,assert_type=assert_type)
#
# self.write(JsonUtil.build_json(code=JsonUtil.Constants.Code_Success,
# msg=JsonUtil.Constants.Msg_Success
# ))
# self.flush()
# self.finish()
except ValueError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Param_Error, JsonUtil.Constants.Msg_Param_Error))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Error, JsonUtil.Constants.Msg_Error))
self.flush()
def get(self, *args, **kwargs):
try:
id = self.get_argument('id',default=None)
if id:
result = self.session.find_single(id)
else:
result = self.session.find_all()
if result:
df = pd.DataFrame(result)
df['create_at'] = df['create_at'].astype(str)
df['update_at'] = df['update_at'].astype(str)
result = df.to_dict(orient = 'records')
code = JsonUtil.Constants.Code_Success
msg = JsonUtil.Constants.Msg_Success
else:
code = JsonUtil.Constants.Code_Result_Error
msg = JsonUtil.Constants.Msg_Result_Error
self.write(JsonUtil.build_json_data(code=code,
msg=msg,
result=result
))
self.flush()
self.finish()
except ValueError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Param_Error, JsonUtil.Constants.Msg_Param_Error))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Error, JsonUtil.Constants.Msg_Error))
self.flush()
def assert_value(x):
_type = x['assert_type']
_value = x['assert_value']
_error = ''
if _type == '区间':
try:
int(x['data_type'])
except:
_error += "value :"+str(_value)+ " 是区间值,需要设置类型(data_type); "
else:
try :
if x['data_type'] == 2:
int(x['data_round'])
except:
                _error += "value :" + str(_value) + " 小数区间,需要设置小数位数(data_round); "
if _error != '' :
return _error
\ No newline at end of file
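A quick illustration of what the row validator above returns: an interval ('区间') row without a data_type produces an error message, any other row passes. The import path is an assumption for the sketch; assert_value is the function defined in this file.

import pandas as pd
# Assumed import path, for illustration only.
from model_data_api.handler.AssertData_Handler import assert_value

bad = pd.Series({'assert_type': '区间', 'assert_value': '1-10',
                 'data_type': None, 'data_round': None})
ok = pd.Series({'assert_type': '字符串', 'assert_value': 'abc',
                'data_type': 0, 'data_round': 0})
print(assert_value(bad))  # "value :1-10 是区间值,需要设置类型(data_type); "
print(assert_value(ok))   # None: no validation error for non-interval rows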
# -*- coding:utf-8 -*-
from utils import JsonUtil
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.model.modelsData_model import ModelsDataManager
from model_data_api.model.featureData_model import FeatureDataManager
from model_data_api.model.dataSet_model import DataSetManager
from model_data_api.model.dataSetDetail_model import DataSetDetailManager
from model_data_api.model.userTestData_model import UserTestDataManager
from model_data_api.views import *
from model_data_api.handler.logic import *
import requests
app = app()
@app.route('/api/automatic')
class AutomaticHandler(BaseHandler):
    """One-click data generation."""
def post(self):
self.model_name = self.get_argument('model_name',default=None)
self.user_count = self.get_argument('user_count',default=None)
if self.model_name and self.user_count:
self.feature_name = None
self.auto_feature = []
self.auto_dataset = {"data_basic":{"model_name":self.model_name,"count":self.user_count},
"data_detail":[]}
url = 'http://172.21.10.25:9012/manage/features'
r = requests.post(url,data={'codes':self.model_name})
if r.status_code == 200:
response = r.json()
self.feature_name = response['data'][self.model_name]['new']+response['data'][self.model_name]['old']
result = self.get_auto()
self.write(JsonUtil.build_json_data(code=result['code'],
message=result['message'],
result = result['result'],
json_path = result['json_path']
))
self.flush()
self.finish()
else:
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Param_Error,
message=JsonUtil.Constants.Msg_Param_Error
))
self.flush()
self.finish()
def get_auto(self):
modelsData = ModelsDataManager()
featureData = FeatureDataManager()
dataSet = DataSetManager()
dataSetDetail = DataSetDetailManager()
userTestData = UserTestDataManager()
result_model = add_models_logic(modelsData,self.model_name,'自动生成','自动')
for f in self.feature_name:
self.auto_feature.append({'feature_name':f,'feature_content':'自动生成','assert_name':'模型测试'})
result_feature = add_features_logic(featureData,self.auto_feature)
for f in self.auto_feature:
self.auto_dataset['data_detail'].append(f)
result_dataset = set_modelsAndfeature_logic(dataSet,self.auto_dataset)
result_datasetdetail = create_dataSet_logic(dataSetDetail,self.model_name,common)
result_add_userTest = None
if result_datasetdetail['code'] == 200:
result_add_userTest = add_userTest_logic(userTestData,result_datasetdetail['result']['detail_path'],common)
        result_userTest = None
        # Guard against the previous step failing and returning None
        if result_add_userTest and result_add_userTest['code'] == 200:
            result_userTest = get_userTest_logic(userTestData,self.model_name,result_add_userTest['set_datail_id'])
            result_userTest['json_path'] = result_datasetdetail['result']['detail_path']
        return result_userTest
import tornado.web
class BaseHandler(tornado.web.RequestHandler):
    # Set the default response headers: every handler in this project writes
    # JSON, so a single consistent Content-Type is used.
    def set_default_headers(self):
        self.set_header('Content-Type', 'application/json; charset=utf-8')
        self.set_header('Accept', 'application/json; charset=utf-8')
    def prepare(self):
        self.set_header('Content-Type', 'application/json; charset=utf-8')
        self.set_header('Accept', 'application/json; charset=utf-8')
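Every endpoint in this commit follows the same lifecycle on top of BaseHandler: open a manager/session in initialize, write a JsonUtil payload in the verb method, flush and finish, then release the session in on_finish. A minimal sketch of that pattern follows; the /api/echo route and EchoHandler are hypothetical and only mirror the structure of the real handlers (FeatureDataHandler, ModelsDataHandler, ...).

# Hypothetical endpoint, for illustration only.
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.views import *
from utils import JsonUtil
app = app()
@app.route('/api/echo')
class EchoHandler(BaseHandler):
    def initialize(self):
        # the real handlers open a *Manager (DB session) here
        pass
    def get(self, *args, **kwargs):
        name = self.get_argument('name', default=None)
        self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Success,
                                            msg=JsonUtil.Constants.Msg_Success,
                                            result={'name': name}))
        self.flush()
        self.finish()
    def on_finish(self):
        # the real handlers close their DB session here
        pass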
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.views import *
from utils.tools import *
from utils import JsonUtil
import json
app = app()
@app.route('/api/datasetDetailRead')
class DatadetailreadHandler(BaseHandler):
    """Read a generated data file."""
def get(self, *args, **kwargs):
# data_id = self.get_argument('data_id',default=None)
try:
file_path = self.get_argument('file_path',default=None)
if file_path:
try:
with open(get_path(common,'suite',file_path), 'r',encoding='utf8') as f:
result = json.loads(f.read())
f.close()
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Success,
message=JsonUtil.Constants.Msg_Success,
msg = "json 读取成功",
result=result
))
except:
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_File_Error,
message=JsonUtil.Constants.Msg_File_Error
))
else:
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Param_Error,
message=JsonUtil.Constants.Msg_Param_Error
))
self.flush()
self.finish()
except ValueError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Param_Error, JsonUtil.Constants.Msg_Param_Error))
self.flush()
except IOError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_File_Error, JsonUtil.Constants.Msg_File_Error))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Error, JsonUtil.Constants.Msg_Error))
self.flush()
\ No newline at end of file
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.model.dataSetDetail_model import DataSetDetailManager
from model_data_api.views import *
from model_data_api.handler.logic import *
app = app()
@app.route('/api/datasetDetail')
class DatadetailHandler(BaseHandler):
    """Fetch data-set details and create detail data."""
def initialize(self):
self.session = DataSetDetailManager()
def get(self,*args,**kwargs):
try:
model_name = self.get_argument('model_name',default=None)
if model_name:
result = self.session.find_single(model_name = model_name)
if result:
df = pd.DataFrame(result)
df['create_at'] = df['create_at'].astype(str)
df['update_at'] = df['update_at'].astype(str)
df['file_name'] = df['data_path'].apply(lambda x: "".join(x.split('/')[-1:]))
result = df.to_dict(orient = 'records')
else:
result = self.session.find_all()
if result:
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Success,
msg=JsonUtil.Constants.Msg_Success,
result = result
))
else:
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Result_Error,
msg=JsonUtil.Constants.Msg_Result_Error
))
self.flush()
self.finish()
except ValueError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Param_Error, JsonUtil.Constants.Msg_Param_Error))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Error, JsonUtil.Constants.Msg_Error))
self.flush()
def post(self,*args,**kwargs):
try:
# data = self.request.body_arguments
model_name = self.get_argument('model_name',default=None)
result = create_dataSet_logic(self.session,model_name,common)
self.write(JsonUtil.build_json_data(code=result['code'],
message=result['message'],
result=result['result'],
model_name = model_name
))
self.flush()
self.finish()
except ValueError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Param_Error, JsonUtil.Constants.Msg_Param_Error))
self.flush()
except IOError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_File_Error, JsonUtil.Constants.Msg_File_Error))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Error, JsonUtil.Constants.Msg_Error))
self.flush()
import random
import numpy as np
def lst_Covering(lst,num,istrue=True):
index = 0
while num>0:
if istrue:
lst[index] +=1
else:
# print(num,index,lst[index])
lst[index] -=1
index+=1
num = num-1
if index >= len(lst):
index = 0
return lst
def set_allocation_num(gdp,count):
    """Allocate count across the percentage shares, preserving the total."""
res = []
for i in gdp:
res.append(round(count*i/100))
_sum = sum(res)
if _sum > count:
_temp = _sum - count
res = sorted(res,reverse = True)
res = lst_Covering(res,_temp,False)
elif _sum < count:
_temp = count - _sum
res = sorted(res)
res = lst_Covering(res,_temp)
return res
def get_array(name,gdp,type,value,dataType,dataRound):
lst = []
for i,(g,t,v,dt,dr) in enumerate(zip(gdp,type,value,dataType,dataRound)):
temp = [name[i]] * int(g)
for _t,_temp in enumerate(temp):
if t=='区间':
if dt:
if v:
_v = v.replace('-',',').split(',')
if dt == 1:
_data = np.random.randint(eval(_v[0]),eval(_v[1])+1)
elif dt == 2:
_data = round(np.random.uniform(eval(_v[0]),eval(_v[1])),dr)
elif dt == 3:
_r = np.random.randint(0,2)
if _r == 0:
_data = np.random.randint(-1000,1000000)
elif _r == 1:
_data = round(np.random.uniform(-1000,1000000),6)
temp[_t] = _data
elif t == '正数':
temp[_t] = int(v)
elif t == '异常空值':
temp[_t] = eval(v)
elif t == '字符串':
temp[_t] = str(v)
lst.extend(temp)
random.shuffle(lst)
return lst
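Taken together, the three helpers above implement the data-generation arithmetic: set_allocation_num rounds each percentage share of the user count, lst_Covering spreads the rounding remainder one unit at a time so the buckets sum back to the requested count, and get_array expands those bucket sizes into shuffled per-user values (random numbers for '区间' interval rows, literal values for '正数', eval'd values such as None for '异常空值'). A hedged worked example follows; the import path is an assumption and the row ids/values are illustrative.

# Assumed import location for the helpers defined above.
from model_data_api.handler.DataSetDetail_Handler import set_allocation_num, get_array

# 1) Split 10 users across three assert rows weighted 33/33/34:
#    round() gives [3, 3, 3], one short of 10, so lst_Covering (called inside
#    set_allocation_num) tops up the first bucket.
counts = set_allocation_num([33, 33, 34], 10)
print(counts)  # -> [4, 3, 3]

# 2) Expand the buckets into shuffled feature values.
ids        = [101, 102, 103]             # assert-row ids used as placeholders
types      = ['区间', '正数', '异常空值']
values     = ['1-10', '5', 'None']
data_type  = [1, 0, 0]                   # 1 = integer interval
data_round = [0, 0, 0]
sample = get_array(ids, counts, types, values, data_type, data_round)
print(len(sample))  # 10 shuffled values: four ints in [1, 10], three 5s, three None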
# -*- coding:utf-8 -*-
# from model_data_api.models import DataSet
from model_data_api.model.dataSet_model import DataSetManager
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.views import *
from model_data_api.handler.logic import *
import pymysql
app = app()
import json
@app.route('/api/dataset')
class DataSetHandler(BaseHandler):
def initialize(self):
self.session = DataSetManager()
pass
def on_finish(self):
self.session.close()
def delete(self):
items = self.get_arguments('item[]')
self.write(str(items))
def post(self,*args,**kwargs):
# data = self.request.body_arguments
try:
data = []
if self.request.body:
data = json.loads(self.request.body)
print(data)
result = set_modelsAndfeature_logic(self.session,data)
self.write(JsonUtil.build_json_data(code=result['code'],
message=result['message'],
result = result['result']
))
self.flush()
self.finish()
except ValueError as e:
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Param_Error,message= JsonUtil.Constants.Msg_Param_Error,data=data))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Error,message= e.args,data=data))
self.flush()
# -*- coding:utf-8 -*-
from model_data_api.model.featureData_model import FeatureDataManager
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.views import *
from model_data_api.handler.logic import *
import pandas as pd
import json
app = app()
@app.route('/api/feature')
class FeatureDataHandler(BaseHandler):
def initialize(self):
self.session = FeatureDataManager()
pass
def on_finish(self):
self.session.close()
def post(self,*args,**kwargs):
"""
[{"feature_name":"f1","feature_content":"测试使用","assert_name":"模型测试"}]
"""
try :
data = []
if self.request.body:
data = json.loads(str(self.request.body, encoding = "utf-8"))
result = add_features_logic(self.session,data)
self.write(JsonUtil.build_json_data(code=result['code'],
message=result['message'],
result = result['result'],
data = data
))
self.flush()
self.finish()
except ValueError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Param_Error, JsonUtil.Constants.Msg_Param_Error))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Error, JsonUtil.Constants.Msg_Error))
self.flush()
def get(self, *args, **kwargs):
try:
feature_name = self.get_argument('feature_name',default=None)
result = get_features_logic(self.session,feature_name)
# if feature_name:
# result = self.session.find_single(feature_name)
# else:
# result = self.session.find_all()
# if result:
# df = pd.DataFrame(result)
# df['create_at'] = df['create_at'].astype(str)
# df['update_at'] = df['update_at'].astype(str)
# result = df.to_dict(orient = 'records')
# code = JsonUtil.Constants.Code_Success
# msg = JsonUtil.Constants.Msg_Success
#
# else:
# code = JsonUtil.Constants.Code_Result_Error
# msg = JsonUtil.Constants.Msg_Result_Error
self.write(JsonUtil.build_json_data(code=result['code'],
message=result['message'],
result=result['result'],
feature_name = feature_name
))
self.flush()
self.finish()
except ValueError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Param_Error, JsonUtil.Constants.Msg_Param_Error))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Error, JsonUtil.Constants.Msg_Error))
self.flush()
def assert_value(x):
_type = x['feature_type']
_value = x['feature_value']
_error = ''
if _type == '区间':
try:
int(x['data_type'])
except:
_error += "value :"+str(_value)+ " 是区间值,需要设置类型(data_type); "
else:
try :
if x['data_type'] == 2:
int(x['data_round'])
except:
                _error += "value :" + str(_value) + " 小数区间,需要设置小数位数(data_round); "
if _error != '' :
return _error
# -*- coding:utf-8 -*-
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.views import *
app = app()
@app.route('/api/index')
class IndexHandler(BaseHandler):
def get(self, *args, **kwargs):
self.render('index.html')
@app.route('/index_test')
class IndexHandler_test(BaseHandler):
def get(self, *args, **kwargs):
self.render('index_test.html')
@app.route('/index_test_new')
class IndexHandler_test_new(BaseHandler):
def get(self, *args, **kwargs):
self.render('mybase_02.html')
# -*- coding:utf-8 -*-
from model_data_api.model.modelsData_model import ModelsDataManager
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.views import *
from utils import JsonUtil
import pandas as pd
from model_data_api.handler.logic import *
app = app()
@app.route('/api/models')
class ModelsDataHandler(BaseHandler):
def initialize(self):
self.session = ModelsDataManager()
pass
def on_finish(self):
self.session.close()
def post(self):
try:
name = self.get_argument('models_name',default=None)
content = self.get_argument('models_content',default='')
create_name = self.get_argument('models_create_name',default=None)
el_expression = self.get_argument('models_el_expression',default=None)
result = add_models_logic(self.session,name,content,create_name,el_expression)
self.write(JsonUtil.build_json_data(code=result['code'],
message=result['message'],
model_name = name,
model_content =content,
model_el_expression =el_expression,
))
self.flush()
self.finish()
except ValueError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Param_Error, JsonUtil.Constants.Msg_Param_Error))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Error, JsonUtil.Constants.Msg_Error))
self.flush()
def get(self, *args, **kwargs):
try:
models_name = self.get_argument('model_name',default=None)
result = get_models_logic(self.session,models_name)
self.write(JsonUtil.build_json_data(code=result['code'],
message=result['message'],
result=result['result'],
models_name = models_name
))
self.flush()
self.finish()
except ValueError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Param_Error, JsonUtil.Constants.Msg_Param_Error))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Error, JsonUtil.Constants.Msg_Error))
self.flush()
\ No newline at end of file
# -*- coding:utf-8 -*-
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.views import *
from utils import JsonUtil
from model_data_api.model.userTestData_model import UserTestDataManager
import requests
import pandas as pd
import json
import hashlib
app = app()
@app.route('/api/model/exec')
class ModelsExecHandler(BaseHandler):
def initialize(self):
self.session = UserTestDataManager()
def post(self):
try :
batch_uuid = self.get_argument('batch_uuid',default=None)
if batch_uuid:
result = self.session.find_single(batch_uuid)
if result:
data_detail = result['data_detail']
if isinstance(data_detail,str):
data_detail = eval(data_detail)
_result = data_detail
_result['data']['subInfo'] = {}
_result['data']['subInfo'] ={'batchUuid':result['batch_uuid']}
self.write(JsonUtil.build_json_with_data(code=JsonUtil.Constants.Code_Success,
msg= JsonUtil.Constants.Msg_Success,
message="",
data = _result['data']
))
else:
self.write(JsonUtil.build_json_with_data(code=JsonUtil.Constants.Code_Fail,
msg= JsonUtil.Constants.Msg_Fail,
message="无此用户"
))
else:
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Param_Error,
msg=JsonUtil.Constants.Msg_Param_Error
))
self.flush()
self.finish()
except ValueError as e:
self.write(JsonUtil.build_json_with_data(code=JsonUtil.Constants.Code_Param_Error, msg=JsonUtil.Constants.Msg_Param_Error,message="出错了!!",batchUuid = batch_uuid))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json_with_data(code=JsonUtil.Constants.Code_Error,msg=JsonUtil.Constants.Msg_Error,message="出错了",batchUuid = batch_uuid))
self.flush()
def get_change(x):
_dict = {
"code": 200,
"data": {"features": None}
}
_list = {}
    for key, value in x.items():
_list = dict(_list, **{key:{'state':200,'value':value}})
_dict['data']['features'] = _list
return _dict
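get_change wraps one generated user row into the response shape the model-execution API is expected to return, reporting every feature with state 200. A small hedged demo; the import path is an assumption and the feature values are made up (f10 appears in the committed logs, f11 is invented here).

import pandas as pd
from model_data_api.handler.ModelsExec_Handler import get_change  # assumed path

row = pd.Series({'f10': 3.0, 'f11': 0.25})
print(get_change(row))
# {'code': 200, 'data': {'features': {'f10': {'state': 200, 'value': 3.0},
#                                     'f11': {'state': 200, 'value': 0.25}}}}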
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.views import *
from utils.tools import *
from utils import JsonUtil
app = app()
@app.route('/api/login')
class UserLoginHandler(BaseHandler):
def get(self, *args, **kwargs):
self.write("UserstHandler")
def post(self,*args,**kwargs):
username = self.get_argument('username',default=None)
self.write(JsonUtil.build_json_data(code=JsonUtil.Constants.Code_Success,
msg=JsonUtil.Constants.Msg_Success,
result={'username':username}
))
self.flush()
self.finish()
\ No newline at end of file
# -*- coding:utf-8 -*-
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.views import *
from model_data_api.model.userTestData_model import UserTestDataManager
from model_data_api.handler.logic import *
app = app()
@app.route('/api/usertest')
class UserTestDataHandler(BaseHandler):
def initialize(self):
self.session = UserTestDataManager()
def get(self):
try:
model_name = self.get_argument('model_name',default=None)
data_set = self.get_argument('data_set_datail_id')
result = get_userTest_logic(self.session,model_name,data_set)
self.write(JsonUtil.build_json_data(code=result['code'],
message=result['message'],
result = result['result']
))
self.flush()
self.finish()
except ValueError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Param_Error, JsonUtil.Constants.Msg_Param_Error))
self.flush()
except IOError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_File_Error, JsonUtil.Constants.Msg_File_Error))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Error, JsonUtil.Constants.Msg_Error))
self.flush()
def post(self):
try:
file_path = self.get_argument('file_path',default=None)
result = add_userTest_logic(self.session,file_path,common)
            self.write(JsonUtil.build_json_data(code=result['code'],
message=result['message'],
))
self.flush()
self.finish()
except ValueError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Param_Error, JsonUtil.Constants.Msg_Param_Error))
self.flush()
except IOError as e:
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_File_Error, JsonUtil.Constants.Msg_File_Error))
self.flush()
except Exception as e:
            # == Exception raised, query failed
self.write(JsonUtil.build_json(JsonUtil.Constants.Code_Error, JsonUtil.Constants.Msg_Error))
self.flush()
# -*- coding:utf-8 -*-
import pandas as pd
from utils import JsonUtil
from utils.tools import *
import os
import datetime
import json
import hashlib
def add_models_logic(session,models_name,content=None,create_name=None,el_expression=None):
    """Create a model."""
if models_name:
if not session.find_single(models_name):
session.create_obj(models_name, content,create_name,el_expression)
return {'result':[],'code':JsonUtil.Constants.Code_Success, 'message':JsonUtil.Constants.Msg_Success}
else:
return {'result':[],'code':JsonUtil.Constants.Code_Success, 'message':JsonUtil.Constants.Msg_Success_ParamsIn}
return {'result':None,'code':JsonUtil.Constants.Code_Param_Error, 'message':JsonUtil.Constants.Msg_Param_Error}
def get_models_logic(session,models_name = None):
    """Fetch model information."""
if not models_name:
result = session.find_all()
else:
result = session.find_single(models_name)
if result:
df = pd.DataFrame(result)
df['create_at'] = df['create_at'].astype(str)
df['update_at'] = df['update_at'].astype(str)
result = df.to_dict(orient = 'records')
return {'result':result,'code':JsonUtil.Constants.Code_Success,'message':JsonUtil.Constants.Msg_Success}
else:
return {'result':result,'code':JsonUtil.Constants.Code_Success,'message':JsonUtil.Constants.Msg_Success_ParamsNot}
def add_features_logic(session,data=None):
    """Add feature information."""
if data:
data_detail = None
for v in data:
if v['assert_name'] and v['feature_name']:
# l_featrue = session.find_single(v['feature_name'])
data_detail = session.find_single_assert(v['assert_name'])
# if data_detail
# if l_featrue :
# session.update_obj(feature_name = v['feature_name'])
# else:
# return {'result':None,'code':JsonUtil.Constants.Code_Param_Error,'message':JsonUtil.Constants.Msg_Param_Assert_Error}
if data_detail:
_df_detail = pd.DataFrame(data_detail)
_df_detail.loc[_df_detail['data_type'].isnull(),'data_type'] = 0
if 'data_round' not in _df_detail.columns.tolist():
_df_detail['data_round'] = 0
else:
_df_detail.loc[_df_detail['data_round'].isnull(),'data_round'] = 0
_df_detail.loc[_df_detail['assert_value'].isnull(),'assert_value'] = None
_df_detail['data_type'] = _df_detail['data_type'].astype(int)
_df_detail['data_round'] = _df_detail['data_round'].astype(int)
_df_data = pd.DataFrame(data)
_df_mer = pd.merge(_df_data,_df_detail,on='assert_name')
_data = _df_mer.to_dict(orient='records')
session.create_obj(_data)
return {'result':_data,'code':JsonUtil.Constants.Code_Success,'message':JsonUtil.Constants.Msg_Success}
else:
return {'result':data,'code':JsonUtil.Constants.Code_Param_Error,'message':JsonUtil.Constants.Msg_Param_Error}
def get_features_logic(session,feature_name):
    """Fetch feature information."""
if feature_name:
result = session.find_single(feature_name)
else:
# result = session.find_all()
return {'result':None,'code':JsonUtil.Constants.Code_Param_Error,'message':JsonUtil.Constants.Msg_Param_Error}
if result:
df = pd.DataFrame(result)
df['create_at'] = df['create_at'].astype(str)
df['update_at'] = df['update_at'].astype(str)
result = df.to_dict(orient = 'records')
return {'result':result,'code':JsonUtil.Constants.Code_Success,'message':JsonUtil.Constants.Msg_Success}
return {'result':None,'code':JsonUtil.Constants.Code_Success,'message':JsonUtil.Constants.Msg_Success_ParamsNot}
def set_modelsAndfeature_logic(session,data = None):
    """Link a model to its features."""
if isinstance(data,dict):
if data:
if 'data_basic' in data.keys() and 'data_detail' in data.keys():
session.create_obj(data['data_basic'],data['data_detail'])
return {'result':[],'code':JsonUtil.Constants.Code_Success,'message':JsonUtil.Constants.Msg_Success}
return {'result':None,'code':JsonUtil.Constants.Code_Param_Error,'message':JsonUtil.Constants.Msg_Param_Structure_Error}
def create_dataSet_logic(session,model_name,common):
    """Generate test data, create the test-data directory, and back up the test data files."""
if model_name:
_json = session.find_single_FeatureData(model_name)
if not _json:
return {'result':None,'code':JsonUtil.Constants.Code_Param_Error,'message':JsonUtil.Constants.Msg_Param_Data_Error,'model_id':None}
else:
model_name = _json['model_name']
model_id = _json['model_id']
count = _json['user_count']
_df_json = pd.DataFrame(_json['data'])
feature_name = list(set(_df_json['feature_name']))
result = {}
for k ,v in _df_json.groupby('feature_name'):
_df = pd.DataFrame(v)
_df.sort_values(by=['feature_gdp','id'],inplace=True,ascending=False)
_df['user_count'] = round(count*_df['feature_gdp']/100,0)
_count_sum = _df['user_count'].astype(int).tolist()
_diff = sum(_count_sum) - count
_id = _df['id'].tolist()
_gdp = _df['feature_gdp'].tolist()
_type = _df['feature_type'].tolist()
_value = _df['feature_value'].tolist()
_data_type = _df['data_type'].tolist()
_data_round = _df['data_round'].tolist()
# _gdp = sorted(_gdp,reverse=True)
# _gdp_num = set_allocation_num(_gdp,_json['count'])
if _diff >0:
_df['user_count'] = lst_Covering( _df['user_count'].tolist(),_diff,istrue=False)
elif _diff < 0:
_df['user_count'] = lst_Covering( _df['user_count'].tolist(),abs(_diff))
_gdp_num = _df['user_count'].tolist()
_result = get_array(_id,_gdp_num,_type,_value,_data_type,_data_round)
result[k] = _result
_path ,_tree,_treeDetail = create_suiteFile(common,model_name,datetime.datetime.now())
_data = os.path.join(_treeDetail['path'],_treeDetail['name'][0],'data_%s.json'%(model_name))
_detail = os.path.join(_treeDetail['path'],_treeDetail['name'][1],'detail_%s.json'%(model_name))
with open(os.path.join(_path,_data), 'w',encoding='utf8') as f:
f.write(json.dumps(_json,indent=4,ensure_ascii=False))
f.close()
            print("Summary file written...")
with open(os.path.join(_path,_detail), 'w',encoding='utf8') as f:
f.write(json.dumps(result,indent=4,ensure_ascii=False))
f.close()
            print("Detail file written...")
session.create_obj_single(model_id=int(model_id),model_name=model_name,
user_count = count,feature_name = ','.join(feature_name),
report_type = 1,data_path = _data)
session.create_obj_single(model_id=int(model_id),model_name=model_name,
user_count = count,feature_name = ','.join(feature_name),
report_type = 2,data_path = _detail)
data_set_datail_id_list = session.find_single(model_name=model_name,data_path=_detail,report_type = 2)
data_set_datail_id = None
if data_set_datail_id_list:
data_set_datail_id = data_set_datail_id_list[0]['id']
return {'result':{'data_path':_path , 'detail_path':_detail,'model_id':model_id,'data_set_datail_id':data_set_datail_id},
'code':JsonUtil.Constants.Code_Success,'message':JsonUtil.Constants.Msg_Success}
def add_userTest_logic(session,file_path,common):
result = {}
if file_path:
with open(get_path(common,'suite',file_path), 'r',encoding='utf8') as f:
result = json.loads(f.read())
f.close()
_df_result = pd.DataFrame(result)
_df_result['result'] = _df_result.apply(lambda x : get_change(x),axis=1)
_df_result['batch_uuid'] = None
_df_result['batch_uuid'] = _df_result['batch_uuid'] .apply(lambda x : hashlib.md5(str(datetime.datetime.now()).encode(encoding='UTF-8')).hexdigest())
batch_uuid = _df_result['batch_uuid'].tolist()
data_detail = _df_result['result'].tolist()
set_detail = session.find_single_model(file_path)
session.create_obj_all(set_detail['id'],set_detail['model_name'],batch_uuid,data_detail)
return {'result':[],'code':JsonUtil.Constants.Code_Success,'message':JsonUtil.Constants.Msg_Success,'set_datail_id':set_detail['id']}
def get_userTest_logic(session,model_name,data_set_datail_id):
result = session.find_single_data_set_datail_id(model_name,data_set_datail_id)
if result:
return {'result':result,'code':JsonUtil.Constants.Code_Success,'message':JsonUtil.Constants.Msg_Success}
else:
return {'result':result,'code':JsonUtil.Constants.Code_Param_Error,'message':JsonUtil.Constants.Msg_Param_Data_Error}
# -*- coding:utf-8 -*-
import tornado.web
# class MainHandler():
# def get(self, *args, **kwargs):
#
# self.render('index.html')
#
# class StoryHandler(tornado.web.RequestHandler):
# def get(self, story_id):
# self.write("You requested the story " + story_id)
#
# class BuyHandler(tornado.web.RequestHandler):
# def get(self):
# self.write("buy.wupeiqi.com/index")
from model_data_api.handler.Base_Handler import BaseHandler
from model_data_api.views import *
app = app()
@app.route('/api/users')
class UserstHandler(BaseHandler):
def get(self, *args, **kwargs):
self.write("UserstHandler")
def post(self,*args,**kwargs):
self.write("sdfsdfdsf")
from model_data_api.model.base_model import BaseManager
from model_data_api.model.tab_model import TabAssertData
class AssertDataManager(BaseManager):
def find_all(self):
return self.get_json(self.session.query(TabAssertData).all())
def find_single(self, name):
return self.get_json(self.session.query(TabAssertData).filter_by(assert_name = name))
def find_condition(self, **kwargs):
return self.session.query(TabAssertData).filter(**kwargs)
# def create_obj(self,assert_name,assert_content,assert_value,assert_type):
# p = TabAssertData(assert_name=assert_name,assert_content=assert_content,assert_value=assert_value,assert_type=assert_type)
# self.session.add(p)
# self.session.commit()
def create_obj(self,basic_data,raw_data):
p = []
for data in raw_data:
p.append(
TabAssertData(assert_name = basic_data['assert_name'],
assert_content =basic_data['assert_content'],
assert_type =data['assert_type'],
assert_value =data['assert_value'],
assert_gdp =data['assert_gdp'],
data_type = data['data_type'],
data_round = data['data_round']
))
self.session.add_all(p)
self.session.commit()
\ No newline at end of file
# -*- coding:utf-8 -*-
# Import the required modules
# from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
# Uses a MySQL database
from collections.abc import Iterable
import json,datetime
from datetime import date
# import pymysql
# pymysql.install_as_MySQLdb()
# Import the project's own split-out data-model modules
# from . import models
# from model_data_api.models import *
from model_data_api.config import *
# Define a base manager class
# Base = DB_Base()
class BaseManager:
def __init__(self):
        # Create the database engine
self.engine = create_engine(settings.DB_CONNECT_STRING['model_db'],echo=True,connect_args={"use_unicode":True,"charset":"utf8"})
# self.engine =DB_Base()
# self.engine = Base
        # Create the connection session
Session = sessionmaker(bind=self.engine)
self.session = Session()
# print(self.session)
# self.cls = cls
    # Shared methods
def update_obj(self, **kw):
self.session.commit()
def delete_obj(self, **kw):
self.session.delete(**kw)
self.session.commit()
def find_single(self, **kw):
        raise Exception("This method must be overridden")
def find_condition(self, **kw):
        raise Exception("This method must be overridden")
def close(self):
self.session.close()
def create_obj(self,cls,**kw):
p = cls(**kw)
self.session.add(p)
self.session.commit()
pass
#
# def create_obj_all(self,cls,**kw):
# p = cls(**kw)
# self.session.add(p)
# self.session.commit()
# pass
def get_json(self,result):
try:
if isinstance(result, Iterable):
tmp = [dict(zip(res.__dict__.keys(), res.__dict__.values())) for res in result]
for t in tmp:
t.pop('_sa_instance_state')
else:
tmp = dict(zip(result.__dict__.keys(), result.__dict__.values()))
tmp.pop('_sa_instance_state')
return tmp
except BaseException as e:
print(e.args)
raise TypeError('Type error of parameter')
# def __default__(self,obj):
# if isinstance(obj, datetime):
# return obj.strftime('%Y-%m-%d %H:%M:%S')
# elif isinstance(obj, date):
# return obj.strftime('%Y-%m-%d')
# else:
# raise TypeError('%r is not JSON serializable' % obj)
#
# def json_dumps(self,_json):
# """ """
# return json.dumps(_json,ensure_ascii=False,default=self.__default)
# def get_json_params(self,*args):
# for i,values in args:
from model_data_api.model.base_model import BaseManager
from model_data_api.model.tab_model import TabDataSet,TabDataSetDatail,TabFeatureData,TabModelsData
class DataSetDetailManager(BaseManager):
def find_single_FeatureData(self, model_name):
query = self.session.query(TabDataSet.model_name,TabModelsData.id.label('model_id'),TabDataSet.user_count,
TabFeatureData.id,TabFeatureData.feature_name,
TabFeatureData.feature_type,TabFeatureData.feature_value,
TabFeatureData.feature_gdp,
TabFeatureData.data_type,
TabFeatureData.data_round
).join(TabFeatureData,
TabDataSet.feature_name==TabFeatureData.feature_name
).join(TabModelsData,TabModelsData.name ==TabDataSet.model_name
).filter(TabDataSet.model_name == model_name,TabFeatureData.is_active==1)
# values = self.session.execute(query).fetchall()
result = {}
for i ,res in enumerate(query):
if not result:
result=dict(result,**dict(zip(res.keys()[0:3],list(res[0:3]))))
result['data'] = []
result['data'].append(dict(zip(res.keys()[3:],list(res[3:]))))
# result = {}
# data = {}
# _len = len(query.all())-1
# for i,res in enumerate(query):
# if not result:
# result=dict(result,**dict(zip(res.keys()[0:2],list(res[0:2]))))
# result['data'] = []
# if not data or res[2] != data['feature_name']:
# try:
# if res[2] != data['feature_name']:
# result['data'].append(data)
# data = {}
# except:
# pass
# data = dict(data,**{res.keys()[2]:res[2],'detail':[]})
# data['detail'].append(dict(zip(res.keys()[3:],list(res[3:]))))
# if i >= _len:
# result['data'].append(data)
return result
def find_single(self, **kwargs):
return self.get_json(self.session.query(TabDataSetDatail).filter_by(**kwargs))
def find_all(self):
return self.get_json(self.session.query(TabDataSetDatail).all())
def find_condition(self, **kwargs):
return self.session.query(TabDataSetDatail).filter(**kwargs)
def create_obj_single(self,**kwargs):#(self,model_id,model_name,user_count,feature_name,deta_path):#,model_id,feature_name,set_name,set_data,set_number):
self.create_obj(TabDataSetDatail,**kwargs)
def create_obj_all(self,cls,**kw):
print()
from model_data_api.model.base_model import BaseManager
from model_data_api.model.tab_model import TabDataSet
import datetime
class DataSetManager(BaseManager):
def find_all(self):
return self.get_json(self.session.query(TabDataSet).all())
def find_single(self, id):
return self.get_json(self.session.query(TabDataSet).filter_by(model_id = int(id)))
def find_condition(self, **kwargs):
return self.session.query(TabDataSet).filter(**kwargs)
def create_obj(self,basic_data,raw_data):#,model_id,feature_name,set_name,set_data,set_number):
# p = []
# for data in raw_data:
# p.append(TabDataSet(model_name = basic_data['model_name'],
# user_count = int(basic_data['count']),
# feature_name=data['feature_name']
# ))
#
# self.session.add_all(p)
# self.session.commit()
create = []
updata = []
for data in raw_data:
query = self.session.query(TabDataSet).filter_by(model_name = basic_data['model_name'],feature_name = data['feature_name']).first()
if not query:
create.append(TabDataSet(model_name = basic_data['model_name'],
user_count = int(basic_data['count']),
feature_name=data['feature_name']
))
else:
self.session.query(TabDataSet).filter_by(model_name = basic_data['model_name'],feature_name = data['feature_name']).update({'user_count':int(basic_data['count']),'update_at':datetime.datetime.now()})
# updata.append(TabDataSet(model_name = basic_data['model_name'],
# user_count = int(basic_data['count']),
# feature_name=data['feature_name'],
# update_at = datetime.datetime.now()
# ).check_existing())
if create:
self.session.add_all(create)
# if updata:
# self.session.updata(updata)
self.session.commit()
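The create_obj above behaves like an upsert keyed on (model_name, feature_name): new pairs are inserted with the requested user_count, existing pairs only have user_count and update_at refreshed. A hedged usage sketch; the payload shape matches what AutomaticHandler builds, while the model and feature names are illustrative.

from model_data_api.model.dataSet_model import DataSetManager

payload = {
    "data_basic": {"model_name": "demo_model", "count": 100},        # illustrative names
    "data_detail": [{"feature_name": "f10"}, {"feature_name": "f11"}],
}
manager = DataSetManager()
manager.create_obj(payload["data_basic"], payload["data_detail"])
# Posting the same payload again updates user_count instead of duplicating rows.
manager.close()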
from model_data_api.model.base_model import BaseManager
from model_data_api.model.tab_model import TabFeatureData
from model_data_api.model.tab_model import TabAssertData
import datetime
#
class FeatureDataManager(BaseManager):
def find_all(self):
return self.get_json(self.session.query(TabFeatureData).all())
def find_single(self, feature_name):
return self.get_json(self.session.query(TabFeatureData).filter_by(feature_name=feature_name))
def find_single_assert(self,assert_name):
result = []
query = self.session.query(TabAssertData.assert_name,TabAssertData.assert_content,
TabAssertData.assert_type,TabAssertData.assert_value,
TabAssertData.assert_gdp,TabAssertData.data_type,TabAssertData.data_round
).filter_by(assert_name=assert_name)
for i ,res in enumerate(query):
result.append(dict(zip(res.keys(),list(res))))
return result
def find_condition(self, **kwargs):
return self.session.query(TabFeatureData).filter(**kwargs)
def update_obj(self, feature_name):
# p = []
self.session.query(TabFeatureData).filter(TabFeatureData.feature_name==feature_name,TabFeatureData.is_active==1).update({TabFeatureData.is_active:2,TabFeatureData.update_at:datetime.datetime.now()})
self.session.commit()
def create_obj(self,data):
p = []
for v in data:
l_featrue = self.find_single(v['feature_name'])
if l_featrue:
self.update_obj(v['feature_name'])
p.append(
TabFeatureData(feature_name = v['feature_name'],
feature_content =v['feature_content'],
feature_type =v['assert_type'],
feature_value =v['assert_value'],
feature_gdp =v['assert_gdp'],
data_type = v['data_type'],
data_round = v['data_round']
))
self.session.add_all(p)
self.session.commit()
# p = TabFeatureData(feature_name=name,feature_content=content)
# self.session.add(p)
# self.update_obj()
from model_data_api.model.base_model import BaseManager
from model_data_api.model.tab_model import TabModelsData
class ModelsDataManager(BaseManager):
def find_all(self):
return self.get_json(self.session.query(TabModelsData).all())
def find_single(self, name):
return self.get_json(self.session.query(TabModelsData).filter_by(name = name))
    def find_condition(self, **kwargs):
        # filter_by() accepts keyword arguments; filter() expects SQL expressions.
        return self.session.query(TabModelsData).filter_by(**kwargs)
    def create_obj(self, name, content, create_name, el_expression):
        p = TabModelsData(name=name, content=content, create_name=create_name, el_expression=el_expression)
        self.session.add(p)
        self.session.commit()
import ast

from model_data_api.model.base_model import BaseManager
from model_data_api.model.tab_model import TabUserTest, TabDataSetDatail
class UserTestDataManager(BaseManager):
    def find_single_model(self, path):
        # Look up the data-set detail row (id and model_name) for a given data file path.
        query = self.session.query(TabDataSetDatail.id,
                                   TabDataSetDatail.model_name
                                   ).filter(TabDataSetDatail.data_path == path).limit(1)
        result = {}
        for res in query:
            result = dict(zip(res.keys(), list(res)))
        return result
    def find_single(self, batch_uuid):
        query = self.session.query(TabUserTest.batch_uuid, TabUserTest.data_detail).filter_by(batch_uuid=batch_uuid)
        result = {}
        for res in query:
            result = dict(zip(res.keys(), list(res)))
            # data_detail stores the repr of a dict (see create_obj_all below),
            # so ast.literal_eval is sufficient and safer than eval().
            result['data_detail'] = ast.literal_eval(result['data_detail'])
        return result
    def find_single_data_set_datail_id(self, model_name, data_set_datail_id):
        query = self.session.query(TabUserTest.batch_uuid, TabUserTest.data_detail).filter_by(
            model_name=model_name, data_set_datail_id=data_set_datail_id)
        result = []
        for res in query:
            _r = dict(zip(res.keys(), list(res)))
            _r['data_detail'] = ast.literal_eval(_r['data_detail'])
            result.append(_r)
        return result
def find_all(self):
return self.get_json(self.session.query(TabUserTest).all())
    def find_condition(self, **kwargs):
        # filter_by() accepts keyword arguments; filter() expects SQL expressions.
        return self.session.query(TabUserTest).filter_by(**kwargs)
    def create_obj_all(self, set_datail_id, model_name, batch_uuid, raw_data):
        # Store one TabUserTest row per (batch UUID, generated data record) pair;
        # the record itself is persisted as its repr in data_detail.
        p = []
        for uid, data in zip(batch_uuid, raw_data):
            p.append(TabUserTest(
                data_set_datail_id=int(set_datail_id),
                model_name=model_name,
                batch_uuid=uid,
                data_detail=str(data)
            ))
        self.session.add_all(p)
        self.session.commit()
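# Illustrative usage sketch (assumption, not in the source): each generated record is
# paired with a batch UUID and stored as text, which is what find_single() parses back.
#
#   import uuid
#   raw_data = [{'td_qu_rules': 0.37}, {'td_qu_rules': 55}]
#   batch_uuid = [str(uuid.uuid4()) for _ in raw_data]
#   UserTestDataManager().create_obj_all(set_datail_id=1, model_name='tongdun_feature',
#                                        batch_uuid=batch_uuid, raw_data=raw_data)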
import tornado.web
from model_data_test.settings import common


class RouterConfig(tornado.web.Application):
    """Tornado Application subclass whose routes are registered via a decorator."""
    def __init__(self):
        tornado.web.Application.__init__(self, **common)
    def route(self, url):
        """
        :param url: URL pattern to register
        :return: decorator that adds the (url, handler) pair to the routing table
        """
        def register(handler):
            """
            :param handler: the RequestHandler class bound to the URL
            :return: the handler, unchanged
            """
            self.add_handlers(".*$", [(url, handler)])  # add the mapping to the routing table
            return handler
        return register


__app__ = RouterConfig()


def app():
    return __app__
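# Illustrative usage sketch (assumption, PingHandler is made up): a handler module
# registers itself on the shared application through the route decorator above.
#
#   import tornado.web
#   from model_data_api import views
#
#   @views.app().route(r'/api/ping')
#   class PingHandler(tornado.web.RequestHandler):
#       def get(self):
#           self.write('pong')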
import tornado.web


class RouterConfig(tornado.web.Application):
    """Tornado Application subclass whose routes are registered via a decorator."""
    def route(self, url):
        """
        :param url: URL pattern to register
        :return: decorator that adds the (url, handler) pair to the routing table
        """
        def register(handler):
            """
            :param handler: the RequestHandler class bound to the URL
            :return: the handler, unchanged
            """
            self.add_handlers(".*$", [(url, handler)])  # add the mapping to the routing table
            return handler
        return register
from celery import Celery

from model_data_test import settings

# Celery application: configuration is read from the settings module and task modules
# are auto-discovered from every package listed in INSTALLED_APPS.
app = Celery('model_data_test')
app.config_from_object('model_data_test.settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
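# Illustrative sketch (assumption, not part of the source): autodiscover_tasks() above
# looks for a tasks module in each package listed in INSTALLED_APPS, e.g. a hypothetical
# model_data_api/tasks.py; the import path assumes this file is model_data_test/celery.py.
#
#   from model_data_test.celery import app
#
#   @app.task
#   def db_operation(record_id):
#       # long-running database work would go here
#       pass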
# Project configuration module.
import os
from datetime import datetime

from tornado.options import options, define

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

define("debug", default=True, help="enable debug mode", type=bool)
define("mode", default="debug", help="run mode")
define("port", default=23020, type=int)
define("ambient", default='dev')

options.parse_command_line()
DEBUG = options.debug
PORT = options.port
AMBIENT = options.ambient

if AMBIENT == 'dev' or AMBIENT == 'test':
    options.log_file_prefix = os.path.join(BASE_DIR, 'logs/%s.log' % (str(datetime.now()).split(' ')[0]))
else:
    # The second argument is absolute, so os.path.join ignores BASE_DIR here.
    options.log_file_prefix = os.path.join(BASE_DIR, '/home/quant_group/logs/model_data_api/%s.log' % (str(datetime.now()).split(' ')[0]))
# Parse again so Tornado's logging setup picks up the log_file_prefix set above.
options.parse_command_line()

INSTALLED_APPS = [
    'model_data_api'
]

DB_CONNECT_STRING = None
if AMBIENT == 'dev' or AMBIENT == 'test':
    DB_CONNECT_STRING = {  # Multiple databases are supported.
        'model_db': 'mysql+pymysql://root:root@127.0.0.1:3306/model_data_test?charset=utf8',
    }
elif AMBIENT == 'online':
    DB_CONNECT_STRING = {  # Multiple databases are supported.
        'model_db': 'mysql+pymysql://qa:qatest@172.17.5.13:30267/model_data_test?charset=utf8',
    }

STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static'),  # Extra directories for static files.
)

common = dict(
    static_path=os.path.join(BASE_DIR, "static"),
    template_path=os.path.join(BASE_DIR, "templates"),
    static_url_prefix="/static/",
    autoreload=True,
    DEBUG=DEBUG,
    suite=os.path.join(BASE_DIR, "")
)
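# Illustrative usage (the entry-script name is an assumption): the options defined above
# are parsed from the command line when the service starts, e.g.
#
#   python server.py --port=23020 --ambient=test --debug=true
#
# 'ambient' selects both the log file location and the DB_CONNECT_STRING entry chosen above.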
# # -*- coding:utf-8 -*-
#
# import tornado.web
# import tornado.ioloop
# from tornado.web import Application
# from model_data_api.handler import test_handler,ModelsData_Handler
# from model_data_test.settings import *
# from model_data_test.activator import process,processHandler
# from tornado.web import RequestHandler, url
# class LogFormatter(tornado.log.LogFormatter):
# def __init__(self):
# print('%(color)s[%(asctime)s %(filename)s:%(funcName)s:%(lineno)d %(levelname)s]%(end_color)s %(message)s')
# super(LogFormatter, self).__init__(
# fmt='%(color)s[%(asctime)s %(filename)s:%(funcName)s:%(lineno)d %(levelname)s]%(end_color)s %(message)s',
# datefmt='%Y-%m-%d %H:%M:%S'
# )
#
# class indexHandler(tornado.web.RequestHandler):  # handler class inheriting from tornado.web.RequestHandler
#     def get(self, num, nid):  # get() handles GET requests
#         print(num, nid)
#         self.render("index.html")  # render the index.html template
#
#
# class Application(tornado.web.Application):
# def __init__(self):
# handlers = [
# (r"/index", test_handler.MainHandler),
# url(r"/story/([0-9]+)", test_handler.StoryHandler),
# url(r"/(?P<app>\w+)/(?P<function>\w+)", processHandler),
# # url(r'/api/model',ModelsData_Handler.ModelsDataHandler)
# ]
# settings = common
# tornado.web.Application.__init__(self, handlers,**settings)
#
#
#
#
#
# import tornado.web
# from model_data_test.settings import *
# from model_data_api.handler.test_handler import *
# class Application(tornado.web.Application):
# def __init__(self):
# handlers = []
# settings = common
# tornado.web.Application.__init__(self, handlers,**settings)
#
# application = Application()
# def decorator(view):
# URL = view.URL
# application.add_handlers('.*$', [(r'%s' % (URL), view)])
#
# @decorator
# class UserstHandler(tornado.web.RequestHandler):
# URL = '/users'
# def get(self, *args, **kwargs):
# self.write("UserstHandler")
#
# @decorator
# class IndexHandler(tornado.web.RequestHandler):
# URL = '/'
# def get(self, *args, **kwargs):
# self.write("IndexHandler")
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import logging

import tornado.log
import tornado.options
import tornado.web
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop

from model_data_api import views
from model_data_test.settings import PORT, DEBUG


class LogFormatter(tornado.log.LogFormatter):
    def __init__(self):
        super(LogFormatter, self).__init__(
            fmt='%(color)s[%(asctime)s %(filename)s:%(funcName)s:%(lineno)d %(levelname)s]%(end_color)s %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )


if __name__ == "__main__":
    app = views.app()
    # Apply the custom formatter to every handler already attached to the root logger.
    for handler in logging.getLogger().handlers:
        handler.setFormatter(LogFormatter())
    if DEBUG:
        # Local debugging: a single process listening directly on PORT.
        app.listen(PORT)
    else:
        # Production: num_processes=0 forks one worker per CPU core.
        http_server = HTTPServer(app)
        http_server.bind(PORT)
        http_server.start(num_processes=0)
    IOLoop.instance().start()
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
</head>
<body>
</body>
</html>
table#variables tr td, table#data tr td, table#header tr td, table#validate tr td,
table#extract tr td, table#hooks tr td, table#params tr td {
padding: 0px;
}
.tips {
color: rgba(0, 0, 0, 0.5);
padding-left: 10px;
}
.tips_true, .tips_false {
padding-left: 10px;
}
.tips_true {
color: green;
}
.tips_false {
color: red;
}
#pre_case li{
position: relative;
}
#pre_case i {
-webkit-transition: opacity .2s;
transition: opacity .2s;
opacity: 0;
display: block;
cursor: pointer;
color: #c00;
top: 10px;
right: 40px;
position: absolute;
font-style: normal;
}
#pre_case li:hover i{
opacity: 1;
}
#pre_config li{
position: relative;
}
#pre_config i {
-webkit-transition: opacity .2s;
transition: opacity .2s;
opacity: 0;
display: block;
cursor: pointer;
color: #c00;
top: 10px;
right: 40px;
position: absolute;
font-style: normal;
}
#pre_config li:hover i{
opacity: 1;
}
.el-header, .el-footer {
background-color: #B3C0D1;
color: #333;
text-align: center;
line-height: 60px;
}
.el-aside {
background-color: #D3DCE6;
color: #333;
text-align: left;
line-height: 200px;
}
.el-main {
background-color: #E9EEF3;
color: #333;
text-align: center;
/*line-height: 160px;*/
}
.el-table .warning-row {
background: oldlace;
}
.el-table .success-row {
background: #f0f9eb;
}
p {
text-indent: 5em;
/*font-family: sans-serif;*/
font-family:Georgia;
}
/* smooth scroll */
$(function () {
    $('a[href*="#"]:not([href="#"])').click(function () {
if (location.pathname.replace(/^\//, '') == this.pathname.replace(/^\//, '') || location.hostname == this.hostname) {
var target = $(this.hash);
target = target.length ? target : $('[name=' + this.hash.slice(1) + ']');
if (target.length) {
$('html,body').animate({
scrollTop: target.offset().top
}, 1000);
return false;
}
}
});
});
/* scrollspy */
$('body').scrollspy({target: '#navbar-scroll'})
// Closes the Responsive Menu on Menu Item Click
$('.navbar-collapse ul li a').click(function () {
$('.navbar-toggle:visible').click();
});
/* carousel */
$(document).ready(function () {
$("#screenshots").owlCarousel({
items: 4,
itemsCustom: [
[0, 1],
[480, 2],
[768, 3],
[992, 4]
],
});
$("#owl-testi").owlCarousel
({
navigation: false, // Show next and prev buttons
slideSpeed: 300,
autoHeight: true,
singleItem: true
});
});
/* sticky navigation */
$(document).ready(function () {
$("#menu").sticky({topSpacing: 0});
});
jQuery(document).ready(function ($) {
// site preloader -- also uncomment the div in the header and the css style for #preloader
$(window).load(function () {
$('#preloader').fadeOut('slow', function () {
$(this).remove();
});
});
});
/* scrollToTop */
$(document).ready(function () {
//Check to see if the window is top if not then display button
$(window).scroll(function () {
if ($(this).scrollTop() > 500) {
$('.scrollToTop').fadeIn();
} else {
$('.scrollToTop').fadeOut();
}
});
//Click event to scroll to top
$('.scrollToTop').click(function () {
$('html, body').animate({scrollTop: 0}, 800);
return false;
});
});
/* parallax background image http://www.minimit.com/articles/lets-animate/parallax-backgrounds-with-centered-content */
/* detect touch */
if ("ontouchstart" in window) {
document.documentElement.className = document.documentElement.className + " touch";
}
if (!$("html").hasClass("touch")) {
/* background fix */
$(".parallax").css("background-attachment", "fixed");
}
/* fix vertical when not overflow
call fullscreenFix() if .fullscreen content changes */
function fullscreenFix() {
var h = $('body').height();
// set .fullscreen height
$(".content-b").each(function (i) {
if ($(this).innerHeight() <= h) {
$(this).closest(".fullscreen").addClass("not-overflow");
}
});
}
$(window).resize(fullscreenFix);
fullscreenFix();
/* resize background images */
function backgroundResize() {
var windowH = $(window).height();
$(".landing, .action, .contact, .subscribe").each(function (i) {
var path = $(this);
// variables
var contW = path.width();
var contH = path.height();
var imgW = path.attr("data-img-width");
var imgH = path.attr("data-img-height");
var ratio = imgW / imgH;
// overflowing difference
var diff = parseFloat(path.attr("data-diff"));
diff = diff ? diff : 0;
// remaining height to have fullscreen image only on parallax
var remainingH = 0;
if (path.hasClass("parallax") && !$("html").hasClass("touch")) {
var maxH = contH > windowH ? contH : windowH;
remainingH = windowH - contH;
}
// set img values depending on cont
imgH = contH + remainingH + diff;
imgW = imgH * ratio;
// fix when too large
if (contW > imgW) {
imgW = contW;
imgH = imgW / ratio;
}
//
path.data("resized-imgW", imgW);
path.data("resized-imgH", imgH);
path.css("background-size", imgW + "px " + imgH + "px");
});
}
$(window).resize(backgroundResize);
$(window).focus(backgroundResize);
backgroundResize();
/* set parallax background-position */
function parallaxPosition(e) {
var heightWindow = $(window).height();
var topWindow = $(window).scrollTop();
var bottomWindow = topWindow + heightWindow;
var currentWindow = (topWindow + bottomWindow) / 2;
$(".parallax").each(function (i) {
var path = $(this);
var height = path.height();
var top = path.offset().top;
var bottom = top + height;
// only when in range
if (bottomWindow > top && topWindow < bottom) {
var imgW = path.data("resized-imgW");
var imgH = path.data("resized-imgH");
// min when image touch top of window
var min = 0;
// max when image touch bottom of window
var max = -imgH + heightWindow;
// overflow changes parallax
var overflowH = height < heightWindow ? imgH - height : imgH - heightWindow; // fix height on overflow
top = top - overflowH;
bottom = bottom + overflowH;
// value with linear interpolation
var value = min + (max - min) * (currentWindow - top) / (bottom - top);
// set background-position
var orizontalPosition = path.attr("data-oriz-pos");
orizontalPosition = orizontalPosition ? orizontalPosition : "50%";
$(this).css("background-position", orizontalPosition + " " + value + "px");
}
});
}
if (!$("html").hasClass("touch")) {
$(window).resize(parallaxPosition);
//$(window).focus(parallaxPosition);
$(window).scroll(parallaxPosition);
parallaxPosition();
}
/*! WOW - v1.0.1 - 2014-08-15
* Copyright (c) 2014 Matthieu Aussaguel; Licensed MIT */(function(){var a,b,c,d=function(a,b){return function(){return a.apply(b,arguments)}},e=[].indexOf||function(a){for(var b=0,c=this.length;c>b;b++)if(b in this&&this[b]===a)return b;return-1};b=function(){function a(){}return a.prototype.extend=function(a,b){var c,d;for(c in b)d=b[c],null==a[c]&&(a[c]=d);return a},a.prototype.isMobile=function(a){return/Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(a)},a}(),c=this.WeakMap||this.MozWeakMap||(c=function(){function a(){this.keys=[],this.values=[]}return a.prototype.get=function(a){var b,c,d,e,f;for(f=this.keys,b=d=0,e=f.length;e>d;b=++d)if(c=f[b],c===a)return this.values[b]},a.prototype.set=function(a,b){var c,d,e,f,g;for(g=this.keys,c=e=0,f=g.length;f>e;c=++e)if(d=g[c],d===a)return void(this.values[c]=b);return this.keys.push(a),this.values.push(b)},a}()),a=this.MutationObserver||this.WebkitMutationObserver||this.MozMutationObserver||(a=function(){function a(){console.warn("MutationObserver is not supported by your browser."),console.warn("WOW.js cannot detect dom mutations, please call .sync() after loading new content.")}return a.notSupported=!0,a.prototype.observe=function(){},a}()),this.WOW=function(){function f(a){null==a&&(a={}),this.scrollCallback=d(this.scrollCallback,this),this.scrollHandler=d(this.scrollHandler,this),this.start=d(this.start,this),this.scrolled=!0,this.config=this.util().extend(a,this.defaults),this.animationNameCache=new c}return f.prototype.defaults={boxClass:"wow",animateClass:"animated",offset:0,mobile:!0,live:!0},f.prototype.init=function(){var a;return this.element=window.document.documentElement,"interactive"===(a=document.readyState)||"complete"===a?this.start():document.addEventListener("DOMContentLoaded",this.start),this.finished=[]},f.prototype.start=function(){var b,c,d,e;if(this.stopped=!1,this.boxes=function(){var a,c,d,e;for(d=this.element.querySelectorAll("."+this.config.boxClass),e=[],a=0,c=d.length;c>a;a++)b=d[a],e.push(b);return e}.call(this),this.all=function(){var a,c,d,e;for(d=this.boxes,e=[],a=0,c=d.length;c>a;a++)b=d[a],e.push(b);return e}.call(this),this.boxes.length)if(this.disabled())this.resetStyle();else{for(e=this.boxes,c=0,d=e.length;d>c;c++)b=e[c],this.applyStyle(b,!0);window.addEventListener("scroll",this.scrollHandler,!1),window.addEventListener("resize",this.scrollHandler,!1),this.interval=setInterval(this.scrollCallback,50)}return this.config.live?new a(function(a){return function(b){var c,d,e,f,g;for(g=[],e=0,f=b.length;f>e;e++)d=b[e],g.push(function(){var a,b,e,f;for(e=d.addedNodes||[],f=[],a=0,b=e.length;b>a;a++)c=e[a],f.push(this.doSync(c));return f}.call(a));return g}}(this)).observe(document.body,{childList:!0,subtree:!0}):void 0},f.prototype.stop=function(){return this.stopped=!0,window.removeEventListener("scroll",this.scrollHandler,!1),window.removeEventListener("resize",this.scrollHandler,!1),null!=this.interval?clearInterval(this.interval):void 0},f.prototype.sync=function(){return a.notSupported?this.doSync(this.element):void 0},f.prototype.doSync=function(a){var b,c,d,f,g;if(!this.stopped){if(null==a&&(a=this.element),1!==a.nodeType)return;for(a=a.parentNode||a,f=a.querySelectorAll("."+this.config.boxClass),g=[],c=0,d=f.length;d>c;c++)b=f[c],e.call(this.all,b)<0?(this.applyStyle(b,!0),this.boxes.push(b),this.all.push(b),g.push(this.scrolled=!0)):g.push(void 0);return g}},f.prototype.show=function(a){return this.applyStyle(a),a.className=""+a.className+" 
"+this.config.animateClass},f.prototype.applyStyle=function(a,b){var c,d,e;return d=a.getAttribute("data-wow-duration"),c=a.getAttribute("data-wow-delay"),e=a.getAttribute("data-wow-iteration"),this.animate(function(f){return function(){return f.customStyle(a,b,d,c,e)}}(this))},f.prototype.animate=function(){return"requestAnimationFrame"in window?function(a){return window.requestAnimationFrame(a)}:function(a){return a()}}(),f.prototype.resetStyle=function(){var a,b,c,d,e;for(d=this.boxes,e=[],b=0,c=d.length;c>b;b++)a=d[b],e.push(a.setAttribute("style","visibility: visible;"));return e},f.prototype.customStyle=function(a,b,c,d,e){return b&&this.cacheAnimationName(a),a.style.visibility=b?"hidden":"visible",c&&this.vendorSet(a.style,{animationDuration:c}),d&&this.vendorSet(a.style,{animationDelay:d}),e&&this.vendorSet(a.style,{animationIterationCount:e}),this.vendorSet(a.style,{animationName:b?"none":this.cachedAnimationName(a)}),a},f.prototype.vendors=["moz","webkit"],f.prototype.vendorSet=function(a,b){var c,d,e,f;f=[];for(c in b)d=b[c],a[""+c]=d,f.push(function(){var b,f,g,h;for(g=this.vendors,h=[],b=0,f=g.length;f>b;b++)e=g[b],h.push(a[""+e+c.charAt(0).toUpperCase()+c.substr(1)]=d);return h}.call(this));return f},f.prototype.vendorCSS=function(a,b){var c,d,e,f,g,h;for(d=window.getComputedStyle(a),c=d.getPropertyCSSValue(b),h=this.vendors,f=0,g=h.length;g>f;f++)e=h[f],c=c||d.getPropertyCSSValue("-"+e+"-"+b);return c},f.prototype.animationName=function(a){var b;try{b=this.vendorCSS(a,"animation-name").cssText}catch(c){b=window.getComputedStyle(a).getPropertyValue("animation-name")}return"none"===b?"":b},f.prototype.cacheAnimationName=function(a){return this.animationNameCache.set(a,this.animationName(a))},f.prototype.cachedAnimationName=function(a){return this.animationNameCache.get(a)},f.prototype.scrollHandler=function(){return this.scrolled=!0},f.prototype.scrollCallback=function(){var a;return!this.scrolled||(this.scrolled=!1,this.boxes=function(){var b,c,d,e;for(d=this.boxes,e=[],b=0,c=d.length;c>b;b++)a=d[b],a&&(this.isVisible(a)?this.show(a):e.push(a));return e}.call(this),this.boxes.length||this.config.live)?void 0:this.stop()},f.prototype.offsetTop=function(a){for(var b;void 0===a.offsetTop;)a=a.parentNode;for(b=a.offsetTop;a=a.offsetParent;)b+=a.offsetTop;return b},f.prototype.isVisible=function(a){var b,c,d,e,f;return c=a.getAttribute("data-wow-offset")||this.config.offset,f=window.pageYOffset,e=f+Math.min(this.element.clientHeight,innerHeight)-c,d=this.offsetTop(a),b=d+a.clientHeight,e>=d&&b>=f},f.prototype.util=function(){return null!=this._util?this._util:this._util=new b},f.prototype.disabled=function(){return!this.config.mobile&&this.util().isMobile(navigator.userAgent)},f}()}).call(this);
{
"model_name": "tongdun_feature",
"model_id": 24,
"user_count": 100,
"data": [
{
"id": 8548,
"feature_name": "td_qu_rules",
"feature_type": "区间",
"feature_value": "1-100",
"feature_gdp": 20,
"data_type": 1,
"data_round": 0
},
{
"id": 8549,
"feature_name": "td_qu_rules",
"feature_type": "区间",
"feature_value": "0.1-1",
"feature_gdp": 15,
"data_type": 2,
"data_round": 6
},
{
"id": 8550,
"feature_name": "td_qu_rules",
"feature_type": "区间",
"feature_value": null,
"feature_gdp": 15,
"data_type": 3,
"data_round": 0
},
{
"id": 8551,
"feature_name": "td_qu_rules",
"feature_type": "异常",
"feature_value": "-9999999",
"feature_gdp": 10,
"data_type": 0,
"data_round": 0
},
{
"id": 8552,
"feature_name": "td_qu_rules",
"feature_type": "异常",
"feature_value": "None",
"feature_gdp": 10,
"data_type": 0,
"data_round": 0
},
{
"id": 8553,
"feature_name": "td_qu_rules",
"feature_type": "字符串",
"feature_value": "str",
"feature_gdp": 10,
"data_type": 0,
"data_round": 0
},
{
"id": 8554,
"feature_name": "td_qu_rules",
"feature_type": "整数",
"feature_value": "0",
"feature_gdp": 10,
"data_type": 0,
"data_round": 0
}
]
}