Add generic config generator (添加通用配置生成器)
parent 537ae115d2
commit a31f5c7661
@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
@@ -0,0 +1,41 @@
<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="DuplicatedCode" enabled="true" level="WEAK WARNING" enabled_by_default="true">
      <Languages>
        <language minSize="98" name="Python" />
      </Languages>
    </inspection_tool>
    <inspection_tool class="PyBroadExceptionInspection" enabled="false" level="WEAK WARNING" enabled_by_default="false" />
    <inspection_tool class="PyPep8Inspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
      <option name="ignoredErrors">
        <list>
          <option value="E501" />
          <option value="E302" />
          <option value="E402" />
          <option value="E303" />
          <option value="E305" />
        </list>
      </option>
    </inspection_tool>
    <inspection_tool class="PyPep8NamingInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
      <option name="ignoredErrors">
        <list>
          <option value="N801" />
          <option value="N806" />
          <option value="N802" />
        </list>
      </option>
    </inspection_tool>
    <inspection_tool class="PyRedeclarationInspection" enabled="false" level="WARNING" enabled_by_default="false" />
    <inspection_tool class="PyShadowingNamesInspection" enabled="false" level="WEAK WARNING" enabled_by_default="false" />
    <inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
      <option name="ignoredIdentifiers">
        <list>
          <option value="str.value" />
        </list>
      </option>
    </inspection_tool>
    <inspection_tool class="StrFormatInspection" enabled="false" level="WARNING" enabled_by_default="false" />
  </profile>
</component>
@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9" project-jdk-type="Python SDK" />
</project>
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/config-generator.iml" filepath="$PROJECT_DIR$/.idea/config-generator.iml" />
    </modules>
  </component>
</project>
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="" vcs="Git" />
  </component>
</project>
@@ -0,0 +1,96 @@
#!/usr/bin/env python
# coding:utf-8
import enum
import os
import sys
import time

curr_dir = os.path.split(os.path.abspath(__file__))[0]
sys.path.append(os.path.join(curr_dir, 'firebase_tools'))
sys.path.append(os.path.join(curr_dir, 'google_drive'))
sys.path.append(os.path.join(curr_dir, 'ipm'))

import ipm.wechat_alert as wechat
import ipm.clear_cdn as clear_cdn

from notification_helper import NotificationHelper
from firebase_tools.firebase_helper import FirebaseHelperInstance, FirebaseHelper
from google_drive.google_sheet import GoogleSheetHelper

firebase_helper = FirebaseHelperInstance()
sheet_helper = GoogleSheetHelper()
time_start = time.time()


def calc_step_time(step: str):
    global time_start
    time_end = time.time()
    print(f'{step}步骤完成|耗时: {(time_end - time_start):.0f}秒')
    time_start = time.time()


class env(enum.Enum):
    debug = 'debug'      # debug environment
    release = 'release'  # release environment


class platform(enum.Enum):
    No = 'None'
    Android = 'Android'
    iOS = 'iOS'
    All = 'All'


def get_platform_list(platform_param):
    if platform_param == platform.All.value:
        return [platform.Android.value, platform.iOS.value]
    elif platform_param == platform.Android.value or platform_param == platform.iOS.value:
        return [platform_param]
    return None


# region Project definitions
class project(enum.Enum):
    dof = 'dof'
    d2 = 'd2'
    find_out = 'find_out'
    find_master = 'find_master'
    find_it = 'find_it'

# endregion

# region Firebase accessors
def get_firebase_instance(project_id) -> FirebaseHelper:
    if project_id == project.dof.value:
        return firebase_helper.get_firebase_dof()
    elif project_id == project.d2.value:
        return firebase_helper.get_firebase_d2()
    elif project_id == project.find_out.value:
        return firebase_helper.get_firebase_find_out()
    elif project_id == project.find_master.value:
        return firebase_helper.get_firebase_find_master()
    elif project_id == project.find_it.value:
        return firebase_helper.get_firebase_find_it()
    else:
        return None
# endregion

# region WeCom (enterprise WeChat) notification helpers
HOOK_URL = 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=4d38d2a3-4fd7-41c3-a3b1-10af3ba639b4'
notification = NotificationHelper(HOOK_URL)

def wechat_alert():
    wechat.wechat_alert(notification.get_msg(), notification.get_hook_url(), notification.get_people_list())

def wechat_alert_message(message: str):
    wechat.wechat_alert(message, notification.get_hook_url())

def wechat_alert_exception(e: Exception):
    wechat.wechat_alert(str(e), notification.get_hook_url(), "15036516116")
# endregion


def get_project_cdn(project_id):
    if project_id == project.dof.value:
        return 'https://cdn3-dof.fungame.cloud'
    elif project_id == project.d2.value:
        return 'https://cdn3-dof.fungame.cloud'
    elif project_id == project.find_out.value:
        return 'https://cdn3-find-out.fungame.cloud'
    elif project_id == project.find_master.value:
        return 'https://cdn3-find-master.fungame.cloud'
    elif project_id == project.find_it.value:
        return 'https://cdn3-find-it.fungame.cloud'
    else:
        return None
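Note: a brief usage sketch of the helpers above, not part of the commit; the 'dof' / 'All' arguments are only illustrative values.

import config

print(config.get_platform_list(config.platform.All.value))            # ['Android', 'iOS']; unknown values yield None
firebase = config.get_firebase_instance(config.project.dof.value)     # lazily builds the per-project FirebaseHelper
config.wechat_alert_message('config generator smoke test')            # posts a plain-text message to the WeCom webhook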
@@ -0,0 +1,391 @@
#!/usr/bin/env python
# coding:utf-8
import json
import os
import sys

import config
import utils


class CDNConfig:
    enable = False
    dir = ''
    filename = ''
    version = ''

    def __init__(self, enable: bool, dir: str, filename: str, version: str):
        self.enable = enable
        self.dir = dir
        self.filename = filename
        self.version = version


class RemoteConfig:
    enable = False
    key = ''
    group = ''
    condition = ''

    def __init__(self, enable, key, group, condition):
        self.enable = enable
        self.key = key
        self.group = group
        self.condition = condition


class ConfigGenerator:
    google_sheet_file_name = None
    sheet_table_name = None
    project = None
    platform = None
    env = None
    cdn_config = None
    remote_config = None
    sheet_helper = None
    firebase_helper = None

    field_dict = {}       # column index -> field name
    field_type_dict = {}  # column index -> field type

    # region Generator initialization
    def __init__(self, google_sheet_file_name, sheet_table_name, project, platform, env):
        self.sheet_helper = config.sheet_helper
        self.google_sheet_file_name = google_sheet_file_name
        self.sheet_table_name = sheet_table_name
        self.project = project
        self.platform = platform.lower()
        self.env = env
        self.init_notification_param()
        self.init_config_generator_param()

    def init_notification_param(self):
        config.notification.append_msg(f'生成配置文件[文件名: {self.google_sheet_file_name}, 表名: {self.sheet_table_name}]')
        config.notification.append_msg(f'配置构建参数[项目: {self.project}, 平台: {self.platform}, 环境: {self.env}]')

    def init_config_generator_param(self):
        sheet = self.sheet_helper.get_sheet_table(self.google_sheet_file_name, self.sheet_table_name)
        if sheet is None:
            config.wechat_alert_exception(f'获取表格失败[文件名: {self.google_sheet_file_name}, 表名: {self.sheet_table_name}]')
        sheet_all_row_datas = sheet.get_all_values(major_dimension='ROWS')
        for i, row_values in enumerate(sheet_all_row_datas):
            if i == 0:
                self.parse_cdn_config(row_values)
            elif i == 1:
                self.parse_remote_config(row_values)
            elif i == 3:
                self.parse_field(row_values, self.field_dict)
            elif i == 4:
                self.parse_field_type(row_values, self.field_type_dict)
            elif i > 4:
                # Rows 0-4 are header rows; level data starts below them.
                break

    def parse_cdn_config(self, row_values):
        enable_index = -1
        enable_value = False
        dir_index = -1
        dir_value = None
        filename_index = -1
        filename_value = None
        version_index = -1
        version_value = None

        for i, cell_value in enumerate(row_values):
            if cell_value == 'enable':
                enable_index = i + 1
            elif cell_value == '目录':
                dir_index = i + 1
            elif cell_value == '文件名':
                filename_index = i + 1
            elif cell_value == '版本':
                version_index = i + 1

        for i, cell_value in enumerate(row_values):
            if i == enable_index:
                if str(cell_value).lower() == 'true':
                    enable_value = True
            elif i == dir_index:
                dir_value = cell_value
            elif i == filename_index:
                filename_value = cell_value
            elif i == version_index:
                version_value = cell_value
        self.cdn_config = CDNConfig(enable_value, dir_value, filename_value, version_value)

    def parse_remote_config(self, row_values):
        enable_index = -1
        enable_value = False
        key_index = -1
        key_value = None
        group_index = -1
        group_value = None
        condition_index = -1
        condition_value = None
        for i, cell_value in enumerate(row_values):
            if cell_value == 'enable':
                enable_index = i + 1
            if cell_value == 'key':
                key_index = i + 1
            elif cell_value == 'group':
                group_index = i + 1
            elif cell_value == 'condition':
                condition_index = i + 1
        for i, cell_value in enumerate(row_values):
            if i == enable_index:
                if str(cell_value).lower() == 'true':
                    enable_value = True
            elif i == key_index:
                remote_key = cell_value
                if '#platform#' in remote_key:
                    remote_key = remote_key.replace('#platform#', self.platform)
                key_value = remote_key
            elif i == group_index:
                group_value = cell_value
            elif i == condition_index:
                condition_value = cell_value
        self.remote_config = RemoteConfig(enable_value, key_value, group_value, condition_value)

    def parse_field(self, row_values, field_dict):
        for i, field_name in enumerate(row_values):
            if i == 0:
                continue
            if field_name != '':
                field_dict[i] = field_name
        print(str(field_dict))

    def parse_field_type(self, row_values, field_type_dict):
        for i, field_type in enumerate(row_values):
            if i == 0:
                continue
            if field_type != '':
                field_type_dict[i] = field_type
        print(str(field_type_dict))
    # endregion

    def gen_config_json(self):
        sheet = self.sheet_helper.get_sheet_table(self.google_sheet_file_name, self.sheet_table_name)
        sheet_all_row_datas = sheet.get_all_values(major_dimension='ROWS')
        config_json = {'datas': []}
        error_lines = []
        data_row_index = -1
        for i, row_values in enumerate(sheet_all_row_datas):
            if data_row_index == -1:
                if 'data' not in str(row_values[0]).lower():
                    continue
                else:
                    data_row_index = i

            if self.is_row_env_valid(row_values) is False:
                continue

            item_data, error = self.parse_row_data(i + 1, row_values, self.field_dict, self.field_type_dict)
            if error != '':
                error_lines.append(error)
            elif len(item_data) == 0:
                continue
            else:
                config_json['datas'].append(item_data)

        if len(error_lines) > 0:
            config.notification.append_msg(str(error_lines))
            return

        _json = json.dumps(config_json)
        filename = self.get_config_filename()
        local_file_path = self.get_local_config_file_path()
        utils.write_json_file(local_file_path, _json)
        config.notification.append_msg(f"{filename} 关卡配置生成成功!当前总关卡数:{len(config_json['datas'])}")
        config.wechat_alert_message(f'{filename} json:{_json}')
        print(f"{filename} json: {_json}")
        return _json

    # region Row parsing
    def is_row_env_valid(self, row_values):
        if self.env == config.env.debug.value:
            return True
        if row_values[1] == 'TRUE':
            return True
        return False

    def parse_row_data(self, row, row_values, field_dict, field_type_dict):
        item_data = {}
        error = ''
        for i, value in enumerate(row_values):
            if i not in field_dict or i not in field_type_dict:
                continue

            field = field_dict[i]
            field_type = field_type_dict[i]
            isvalid, check_error = self.check_field_type_valid(field_type, value)
            if isvalid:
                item_data[field] = self.get_type_value(field_type, value)
            else:
                error += f'|{check_error}'
        if error != '':
            error = f"第{row}行->id:{item_data.get('id')}关卡配置错误: {error}"
        return item_data, error

    def check_field_type_valid(self, field_type, value):
        if value is None:
            return False, '值为空'

        if field_type == '':  # no declared type: accept the value as-is
            return True, ''

        try:
            if field_type == 'int':
                int(value)
            elif field_type == 'float' or field_type == 'double':
                float(value)
            elif field_type == 'bool':
                if value.lower() not in ['true', 'false']:
                    return False, f'布尔值格式错误: {value}'
            elif field_type == 'string':
                # strings need no extra validation
                pass
            elif field_type.startswith('List<'):
                inner_type = field_type[5:-1]  # extract the type inside List<...>
                if value == '':
                    return True, ''  # an empty list is valid

                items = value.split('#')
                for item in items:
                    if item == '':
                        continue
                    valid, error = self.check_field_type_valid(inner_type, item)
                    if not valid:
                        return False, f'列表元素 {item} {error}'
            else:
                return False, f'未知类型: {field_type}'

            return True, ''
        except ValueError:
            return False, f'类型转换错误: 无法将 {value} 转换为 {field_type}'
        except Exception as e:
            return False, f'验证错误: {str(e)}'

    def get_type_value(self, field_type, value):
        if value is None:
            return None

        if field_type == 'int':
            if value == '':
                return 0
            return int(value)
        elif field_type == 'float' or field_type == 'double':
            if value == '':
                return 0.0
            return float(value)
        elif field_type == 'bool':
            if value == '':
                return False
            return value.lower() == 'true'
        elif field_type == 'string':
            return value
        elif field_type.startswith('List<'):
            if value == '':
                return []
            inner_type = field_type[5:-1]
            items = value.split('#')
            result = []
            for item in items:
                if item == '':
                    continue
                result.append(self.get_type_value(inner_type, item))
            return result
        else:
            # unknown type: return the raw value
            return value
    # endregion

    def upload_cdn(self):
        if self.cdn_config is None:
            config.notification.append_msg('cdn配置为空,不上传cdn')
            return

        if self.cdn_config.enable is False:
            config.notification.append_msg('cdn配置未开启, 不上传cdn')
            return

        filename = self.get_config_filename()
        local_file_path = self.get_local_config_file_path()
        if not os.path.exists(local_file_path):
            return
        storage_level_db_path = f"{self.cdn_config.dir}/{filename}"
        generation = config.get_firebase_instance(self.project).upload_file(local_file_path, storage_level_db_path)
        config.notification.append_msg(f"{filename}配置上传到cdn路径:{storage_level_db_path}成功")
        cdn_url = f'{config.get_project_cdn(self.project)}/{storage_level_db_path}?generation={generation}'
        config.notification.append_msg(f"配置最新下载链接url:{cdn_url}")
        return generation

    def update_remote_config(self, generation=None):
        if self.remote_config is None:
            config.notification.append_msg('remote配置为空,不更新RemoteConfig')
            return

        if self.remote_config.enable is False:
            config.notification.append_msg('remote配置未开启,不更新RemoteConfig')
            return

        filename = self.get_config_filename()
        storage_level_db_path = f"{self.cdn_config.dir}/{filename}"
        cdn = config.get_project_cdn(self.project)
        if generation is None:
            generation = config.get_firebase_instance(self.project).get_file_generation(storage_level_db_path)
        level_db_cdn_url = f'{cdn}/{storage_level_db_path}' if generation is None else f'{cdn}/{storage_level_db_path}?generation={generation}'
        key = self.remote_config.key
        group = None if self.remote_config.group == '' else self.remote_config.group
        condition = None if self.remote_config.condition == '' else self.remote_config.condition
        if self.project is not None:
            config.get_firebase_instance(self.project).update_remote_config_json_value(group, condition, key, level_db_cdn_url, True)
            config.notification.append_msg(f"[group:{group}, condition:{condition}, key:{key} env:{self.env}] 云控更新成功")

    def get_local_config_file_path(self):
        local_file_path = f'temp_config/{self.get_config_filename()}'
        return local_file_path

    def get_config_filename(self):
        cdn_filename = self.cdn_config.filename
        if '#platform#' in cdn_filename:
            # self.platform was lower-cased in __init__, so compare case-insensitively
            if self.platform == config.platform.All.value.lower() or self.platform == config.platform.No.value.lower():
                cdn_filename = cdn_filename.replace('#platform#', '')
            else:
                cdn_filename = cdn_filename.replace('#platform#', self.platform)
        cdn_filename += f'-{self.env}.json'
        return cdn_filename


project_id = None
platform = None
env = None
google_sheet_file_name = None
sheet_table_name = None
param_enable_upload_cdn = None
param_enable_upload_remote_config = None

if len(sys.argv) > 1:
    project_id = sys.argv[1]
if len(sys.argv) > 2:
    platform = sys.argv[2]
if len(sys.argv) > 3:
    env = sys.argv[3]
if len(sys.argv) > 4:
    google_sheet_file_name = sys.argv[4]
if len(sys.argv) > 5:
    sheet_table_name = sys.argv[5]
if len(sys.argv) > 6:
    param_enable_upload_cdn = sys.argv[6]
if len(sys.argv) > 7:
    param_enable_upload_remote_config = sys.argv[7]

if __name__ == "__main__":
    if project_id is None or platform is None or env is None or google_sheet_file_name is None or sheet_table_name is None:
        config.notification.append_msg(f'参数错误[project_id:{project_id}, platform:{platform}, env:{env}, '
                                       f'google_sheet_file_name:{google_sheet_file_name}, sheet_table_name:{sheet_table_name}]')
        exit(1)

    config_generator = ConfigGenerator(google_sheet_file_name, sheet_table_name, project_id, platform, env)
    config_json = config_generator.gen_config_json()
    generation = None
    if str(param_enable_upload_cdn).lower() == 'true':
        generation = config_generator.upload_cdn()
    if str(param_enable_upload_remote_config).lower() == 'true':
        config_generator.update_remote_config(generation)
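Note: a hedged sketch of how the generator above is typically driven, not part of the commit; the sheet file name 'LevelConfig' and table name 'Sheet1' are placeholder values, and the positional arguments mirror the sys.argv parsing above.

# python config_generator.py <project_id> <platform> <env> <sheet_file> <sheet_table> <upload_cdn> <upload_remote_config>
# e.g. python config_generator.py dof All release LevelConfig Sheet1 true true
from config_generator import ConfigGenerator

generator = ConfigGenerator('LevelConfig', 'Sheet1', 'dof', 'All', 'release')   # hypothetical sheet/table names
if generator.gen_config_json() is not None:       # None means validation errors were reported instead
    generation = generator.upload_cdn()           # returns the Cloud Storage generation of the uploaded JSON
    generator.update_remote_config(generation)    # points the Remote Config key at the versioned CDN URL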
@@ -0,0 +1,12 @@
{
"type": "service_account",
"project_id": "dof2-b9070",
"private_key_id": "80b51c5076c03aa4dba9ad1c9c1a8ee50e31517b",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDDXEwSyb+/YLj1\nxa9OQz4Gex5JBnolvGQC+V8FcWFGq1V2VFSWNjUPA1EZy9TR3FqgdrxNYFUk7M1r\n4cdImbrobRhQFmPU/s/ejjG8jFtqEXBiiSRy1UXVHEY+GUBTuvivcZsV9Sz2HE3e\nRepm0Uuzzi92wujdqcEWNoIvtsUR3AIdfFTSHDyfal+rLjlGUwNZ+1PQ3JXA1CMZ\nw/Cvtt4SY26qDwYF83op5BiavTVbdO/FKrHvx4gCS3C8m4nWkU3d4V7frvvf1zOE\nZrgmYQFW8ftVEOWhg9hY/RnLwgvhBXpcx1HZ956hlWNcGliH2WxYd07jjCecWH2i\nLPaK6PdFAgMBAAECggEARgqKybekdBBQ7+jSu8v26mrsxkC4vVvIPZq7ex+VwTj/\nWH/OndZMepf1VTrTj/QhV+OKCnHNxQHTemjFwp0/OkKW02vbmgtLap7vbQqjYalj\n8if6NeOJ+Gh9NJloCzz/GSOxdxNibdBjCJxneDp0mkADm2AxDfvlGrorPcEY8Zgf\nUqfsHf4LBE+tYL0LqZXQ7LivE0K3M2UUcGkemthg3cs2fit8DCJS4ASG6KtWinOu\nEEmnQmdK9oKHAEcVylPnIEk9ssISZsvTeA2nGiEvo8WDcs0Z0qpdKSnkmob6k9UG\nEK+BGORaaNvYCRf/wP7cEBNjb5yWhStXCzYNkAZqxwKBgQDtbye6j6mI+UzFWEdh\nT+jR84Glub0ykuq3QW7CSQm8AiWVQk0uvO6CaKy533K3WAtV9QRtwOalk3vUPbv6\nyd1hXQT5H3pKNZkzshFLDoZkFix9KshcqC46ToH2q7uKp59Sf6sXqlOJKynMyfm9\nHItG5ICYOk+s8EPaLFOI1YMHzwKBgQDSou5vWBnAvx47kaBf0Fo/Z9z/cLM5sGap\n8rFxJUyybc8eJ9GX1eQMN73Asw/4u/FZkzn0GjyigHHtsGWK0VlRzL9qkmvM5itB\nsKqEKo1wuuZk4RxBZRCFiYkGlfy901pmn5sazxZ/NZ+eme2Qh4iZP5BII8e61Nol\nJVUbu2FAqwKBgQCgUhnlYvP6xTsNVdp83tQ6T84O9SfHamSKcOg0z9R+2oQ+EJLF\nKFV0QPHAYBy5p/2fgN4IreQPkVJfsX+oi32sLH8bKCIO0bosqeP7ZkdgdYh8y9KW\nngj43eHriXIM4bo2nnYMeLoHONn0qbyz8P4qVEhwgY8jGuj1/FA7aNAdCQKBgDko\nbXtZdOOQ+StBATv+1f6OuLG4a2okLDfB29TQQlfI5INSWtXQ/Okz1xILhTICDiQ4\nh2luFTYjlMbjLyrUVGCWJvtMcGRRsTtKLXtf8LG+hfap/3jfa/RQthZLbSI10Do4\n5WYI/BnY+0+WhZjKro6naQeqbfCNAgVCdC3qUyGxAoGBAM0eyfhmbhXWa0WmYUTb\nHCssWth+Ho3JsaKtFURv4ge7y8Zo1jvAZD/zxNIqnpwgQt0qBHN62LWW62tZjfZG\n9zbTx+F+oBXAs/6bLgmUtq5uIjaZmdxlMAP0XB47vIh05Vk0aRP25ZJbVvdDeH2X\n650CYdAWqcq6HfemrobXoI21\n-----END PRIVATE KEY-----\n",
"client_email": "firebase-adminsdk-u80vp@dof2-b9070.iam.gserviceaccount.com",
"client_id": "115792727331856285564",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/firebase-adminsdk-u80vp%40dof2-b9070.iam.gserviceaccount.com"
}
@@ -0,0 +1,13 @@
{
"type": "service_account",
"project_id": "find-differences-65e47",
"private_key_id": "f8b22c8131e1028b5babd8e370aec172acbb652c",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCuL3v95XnhHq0l\nJ7nIStzXv++FZE2x65TXCyYII8hZhyga07kz2RQuQ2lsd9jRD/1MbHBLXd7wimQH\nnsYb0NHTw3WdAXYmW3As3QVqjJMTm3KsFC7ZXd35er7z3Pt7WbYtNXZmoATT61su\nqL9wXYhwD8L2n3Mp2Tiq7E0vSBzFodJHd+rxhFiKeJpKpmo45jojztM4lfiseYYJ\nEYQPF+RjdjWwhFWlh3mdRALogcSuUkvfEvhv9rxv2i826+NiSZTvH+qtSpOpbHyy\nraaA+23Qsznta7QqixuQ0CmpS6cPf+Jfx53RP4s7Lm28Qkz5ev0GK6GkFhg1VfIh\n5mqm6Fr1AgMBAAECggEAS6y8kTd3xoq1zZ0fNTn1DVTJYmoL6T8AyJaC3Oly2xaB\noXoXjcODZYALFsFQtK9eLCw2cz/ioh11zOu5hQ8pgfd6qw4d/xcdupgBiONu6IdS\nABm6qtwBmqgturPonBQDNkvyVy6xOkKXuEM+/LneE1V9rfjjJyzE1llYRTDusxtc\n9UZAK3jHYeam7JMa+0TA1J08WXIdub9agKiF23as6UdSoBZlCOp+ubbdqUXXqDZl\nD9BUmUfFx0X+Mj8TNS6nZ0TVsi0BoWeSbtzVOC3Ff1vBAv7rqGe1RiVy6ExrmPXW\nFjmPVhPqr6/3lexDCF4MlOem8SH9EPkgMcmPnMvNFwKBgQDoXCPOrL5KdsHsXZiI\n6nhBsR6eYdOapQiO8VvlcgoGYRQ7o18/fNXqiK3xXxY2hcPmmr5H+pDDrZXPbltc\nIgxvRCc/oX8qUXUEwGJ2hhJtb8WutaZitSWjWtBX3pQxlJ9oX11CS5OcpgqdjkuP\nm4ZWNhqVFh2p0mEU4eBo92KiPwKBgQC/6C4GRRRulzey/9vhQg9moGO/A1L4WqT9\neekNn2Q93Tqw3UV7AnsjiR1tNrAuRaIR+KBfeFAHQIyx0P7tJSZalfq7X7Yj5Zxz\n6PeueetBtP9J+v7ultOb/OrGeET+ZeRkS+YxGvHRUeu6gwxb+nj5fJPMqTkiU4/c\nYhtGqGuNywKBgCMCDtsKp7jsX5dPGrY2LVwoZWaTcqAwETQIJUJShjQj2DqMCMO1\n+TzWzu44103rpXJPCjAqDjmNI68W/2jIMHtrF0a8/D3R/AF1QuB8nrYiSvRhpenl\nRQBxpEY44Lb/n2zIQXLFlD3bvmmykKvYUMsYZ7TimiGF8t6EkZz++LtdAoGAWfL7\nz5iFcSiTxOTELRsHmPTIi/mE5D+oYd7Ia8gv9gdBwTCVpoiNKyTTlCM0BRxCUOYW\nVWlRKXUJ/TaWppTpG0B9G2JLPxwabwHdRfyDVnL9zbFF2JL9cHnlmU06GXusx44D\nwTR977zUHsHaAyqtlVLc++q9iudb4NJjMoUylG0CgYEAwJD2bLlkIxOEeWt6HZxO\n6Hr3ysmhrwyayyGswDrXY7xss8Himla3Q7AlqQYzQH5iwNoL4tPZ+4nXAtxwZZvX\nuQRUSTIrGzC516H7JnI3m5UPd3X7IPVMP6Ui1BgacaE8FB7PbGYttqlJtEfNeBFx\n//4YAJQpbpsaXKyPeFh4aqA=\n-----END PRIVATE KEY-----\n",
"client_email": "firebase-adminsdk-3n2xs@find-differences-65e47.iam.gserviceaccount.com",
"client_id": "109381139516717789089",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/firebase-adminsdk-3n2xs%40find-differences-65e47.iam.gserviceaccount.com",
"universe_domain": "googleapis.com"
}
@@ -0,0 +1,13 @@
{
"type": "service_account",
"project_id": "find-it-a08e5",
"private_key_id": "f833603c7dd7dea5fb4cd13361a3a57a4e12dac2",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDVHLiNmwrOinYx\noTlgGFtYzZKYNNZhmkayEY++jqTHHOrJkZgbtKgCk69ZS/6ocU6Rsh3YUgGK8Duf\n8j2+yHVWFXooi9/IZ5/HraVzaJobZIfLagcCOXlVaxa8JC+VuehFEqpydF+I2PII\ng9LUYqEQUyvuWb6tUMxxUYOiV5AaxCLmAKR7yRkPPnqVroDqDONFLSDo9gf+doE6\n9Eh4PSWwHrzyk8sc8nxC9J14455bU3iYACLrtzN7dfAsONV1nhWbmYjyRhJpNfB6\noWiKUrQ+2yPtX7NPSSVAB55mlSnd/52GA7f4apGS0z4QAre7GihW3rk+eFhSr+P2\nD9/UXIYxAgMBAAECggEADiTFVNSU34dN99ZA5hFHwrmGAKcFEV45UekbdqSl5/cR\n2AXbJ/+R5PXjPdRu1sQKWF4PYAN1ScDO3DsqAObx7qAnRtidpgHGl2x9Xg7zp1zB\nLG1hgLLKzDH6xMrMA65ebXMjL8xsaLspOQpyrBjNorU6Zau8HWmO3/hueZBBJp19\nuKLdeUYlmurDAhgD2wu1nai4/fB+nw9CyF1NsA7fRgY7Ie4uoz75fJGRhofLK2Bv\nQ7mXwhtjNISznM5wuA6QuyXSuUP8RL7/5YRpO7kP1z89apcPmV1K/HLkx5uhs3WY\nlPG99VDO0H3ju1SN/sjX0CGeLQQvRDk1uWhiv9z3JwKBgQD40CXZ/r3qPuk/+Y8h\nZu9A6geiMRUwmbXtSCuMgnHmCYcZUPZrSFu4sUyJy670zpQ5WcGacWuV5Aw8c9jW\nc4/YDO9eL+9EXy7e2mNORt03bFDYMFcaAQnUiqdPPYYvhr/4A4HZnyMoKfAQMNe2\n6oabyAHlZK4iyD9UdT6iOgsIlwKBgQDbRJURZ0Jl7hBPt/15Z+PxyrDNKmLTAWhy\nxl8F+zwWi4SH3t3gwvvlhvD3VLluxU2GvtOXHxZcWofMw3dre3YqYv1H+gF9NuP7\nQljWqHDViBxAn9d4pOvONSIVZE27JvBN7X2qffX5l7JI9m3gGXsPBFEF9bnUhRXG\ntFcpSEm4dwKBgAHwpYromJR9/xWXnY0WH2FzMbILDe4+FJQQDv7eeYLgqiQBZJp7\nivOKtl0QsIGKJs+3qnHI1pzplUrXYGZzqjh2CVvDperzjwr/yuxuUwU+m/bDkbpL\nLcdxeVJ92VD/jPmjFV0cB4sloJCPwO7VL02BproiKdVtmEjRTTa4myNrAoGBAJuE\nykIcviBzuMgCFNtOT4OIotvx60QD5KQ7hNxGNHk9HZQNA8xOH8HNcw3J3vftIy6m\nHIc8OmzonRcjsIjgeZBYw5KQhQvdqdflWANwVw9sybUgagCc4M9SKkmH6gYK+Tgc\ncRxC2PrdmEtd1dKz3+9nvVv/zH0qo6Za9NNH/VnDAoGALLLI/pSJHlo0JLYdfVF1\nNQ69x1EF/iQR0LGjVLEdA/Lh+0KWG/cj/9+dxwKS3BSpySRxrhyW+P3ORozPF+JW\nHtZQALaxgESt8pTecnvhTpgFikSXhqKh5C9kz9ih12NgYi+lEbRKWAbSf5ny70CE\n76K1MnGyieVknBW8oYShnRE=\n-----END PRIVATE KEY-----\n",
"client_email": "pipelineupdate@find-it-a08e5.iam.gserviceaccount.com",
"client_id": "103397347800378546383",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/pipelineupdate%40find-it-a08e5.iam.gserviceaccount.com",
"universe_domain": "googleapis.com"
}
@@ -0,0 +1,13 @@
{
"type": "service_account",
"project_id": "infra-387702",
"private_key_id": "1c49457f4057330ab45796ae4068b7e743a71ea2",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC3wcxP3hxoMe4s\nvAQUhfzp4lcykjF3WUR46CD+FYzfP4R7hTxg/0sgckzCmz0Oru1KkaxsrzVVLzSe\ntVzJ3vmiEBpE4D/0/KEjMBS5scM4f4EpS9OhGNGQLxaSurg6m125ZpYZREw2f/Sm\nrhlXwZdk8BElOX7o9PkC5sUH563/Ot0HOo6mqxVKNfTVo59CtNii82FmGe6+oen0\ntYyXsLw0sc+I4C7q4z8QFRP1p1xX6jtjaGjO5xo/PooBHBhii5GEdapPwfDFNhNY\ndpQI/ojDcCTFD7uQR+MU5LHL7YXJZxv6nzSMxgmbc8gbbbSaxe2BgBBjZ4aszgjE\n6ra8/hB5AgMBAAECggEAEsrlsJnWJSRNF8NbNfedfdRdZ1E68JofJe/UdvPAglDi\nRUkjG4bkWaNC3Sin6gVjBGwpOnzBKy7ullgVLxqIBGZZ72cRN2ZyOtFnHzYSMWgd\nzxm/gRNTFEgyimtA3dKHl2dJzLf2xOCZZkX6FsbYSzT/heNvSpcKE91Cs3wix+4i\naOUmYB44/Uxklv0PoxeSm0VfRiTVxW71plqk1qAx0YUKinpPgn+T8jDe5o0woRmP\nVoxVY2Sw0Top7k0Lt46qRj6ZHwGTZWPS4A8ZnkHIOCmkXNqEWCYaXQ/0Z49Pn8+K\n0IDX35DuLORtd2B4GGQ3AlobDZMkwN63PNmkOPHwUQKBgQDhe5O7O4Pc3cdSzBMv\n4fe5mXCDCixsYDVzPxYI65RG05zNvG1tCeWG++hW8eiEQiSNPUCP2zhMumZykcdg\nWOcR2zijICxcT+Y2jADWDYfIcZC6EMRRC7Dw6CueIxHScAwSEq5EODJ7EF+fIu+3\nSfF4t9cuFHhwdJYnKfYOIYkZcQKBgQDQoIcoAtyVG9R7BAZeQD2i7pCt5eZXX9oa\n8UrPBExgdYRm0euwdGmZb5GuOL4xUBM1tIlt/IvBV63CnR+ULrwdWXtPkl6FKsUd\n+CRhDbOC8li0lENF3POz/bOouei0MIEImJk2oeuJFo4Ps8ueNHZ2CTyCFlIhGZnf\nTS9VY/UjiQKBgHolZXvx25bUFVwG7QXtVTMXqdlys2nqHEpYDGm8pFBR5gmVX3nH\n+KzM3anr1dd/tSlF2ymSycbW2xSJYAYTYulLZj0H+r/vEHjQEsvh/IJa8tm4p4B3\n4tw+CRLbUJY82G0Z7YlLIB2GWyZr2Ivj5IlOnaR6tlvE7Go+0uoIxKohAoGBALMv\n/YYHFzBWq+RQq6uOH6JupwNFf9ax9CHJ90fhWDxNE0BAst9mzUBFDGoIQIsYTLeG\n7bby/5I7XXnW/EWI8nd+4KOSgu57srwv4gcd/n3M6xWGdlVuFj8U+5dCvjTtqBYb\nAxVic2vWXIPuGEg0pT5yZm8Q/s+BLqEVqT79UJKJAoGAFushetOSBZftrIe10/R8\nCaq8QJw4/OhCblqFqveb1WgvRDOvoax1X6yptY7WNIJ0kTjVIOcr2GqfldA3eNyS\nWkX9NzrX+kFHvpYAY+1RBo5P/wO8AzKEAukctQ2qseXs7mE8EircwSMGfjq8hp4I\nPZ79rr7R6YwUrodNqhs9n/Q=\n-----END PRIVATE KEY-----\n",
"client_email": "find-master-remoteconfig@infra-387702.iam.gserviceaccount.com",
"client_id": "106603798019848556340",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/find-master-remoteconfig%40infra-387702.iam.gserviceaccount.com",
"universe_domain": "googleapis.com"
}
@@ -0,0 +1,445 @@
#!/usr/bin/env python
# coding:utf-8
import io
import json
import os
import sys
import google.cloud.storage
import requests

curr_dir = os.path.split(os.path.abspath(__file__))[0]
print(curr_dir)
sys.path.append(os.path.join(curr_dir, "../"))

import utils as utils
import firebase_admin
from firebase_admin import credentials, db
from firebase_admin import storage, firestore
from oauth2client.service_account import ServiceAccountCredentials

DOF_GOOGLE_SERVER_FILE = os.path.join(curr_dir, "find-differences-65e47.json")
D2_GOOGLE_SERVER_FILE = os.path.join(curr_dir, "dof2-b9070.json")
FindOut_GOOGLE_SERVER_FILE = os.path.join(curr_dir, "dof2-b9070.json")
FindMaster_GOOGLE_SERVER_FILE = os.path.join(curr_dir, "find-master-387702.json")
FindIt_GOOGLE_SERVER_FILE = os.path.join(curr_dir, "find-it-a08e5.json")

DOF_PROJECT_ID = "find-differences-65e47"
D2_PROJECT_ID = "dof2-b9070"
FindOut_PROJECT_ID = "dof2-b9070"
FindMaster_PROJECT_ID = "find-master-88ffb"
FindIt_PROJECT_ID = "find-it-a08e5"

BASE_URL = "https://firebaseremoteconfig.googleapis.com"


def get_remote_config_file(project_id):
    return os.path.join(curr_dir, f"remote_config_{project_id}.json")


def get_remote_config_url(project_id):
    return f"{BASE_URL}/v1/projects/{project_id}/remoteConfig"


class Singleton:
    _instances = {}

    def __new__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super().__new__(cls)
        return cls._instances[cls]


class FirebaseHelperInstance(Singleton):
    def __init__(self):
        print("初始化")
        self.firebase_dof = None
        self.firebase_d2 = None
        self.firebase_find_out = None
        self.firebase_find_master = None
        self.firebase_find_it = None

    def get_firebase_dof(self):
        if self.firebase_dof is None:
            self.firebase_dof = FirebaseHelper(DOF_PROJECT_ID, DOF_GOOGLE_SERVER_FILE)
        return self.firebase_dof

    def get_firebase_d2(self):
        if self.firebase_d2 is None:
            self.firebase_d2 = FirebaseHelper(D2_PROJECT_ID, D2_GOOGLE_SERVER_FILE)
        return self.firebase_d2

    def get_firebase_find_out(self):
        if self.firebase_find_out is None:
            self.firebase_find_out = FirebaseHelper(FindOut_PROJECT_ID, FindOut_GOOGLE_SERVER_FILE)
        return self.firebase_find_out

    def get_firebase_find_master(self):
        if self.firebase_find_master is None:
            self.firebase_find_master = FirebaseHelper(FindMaster_PROJECT_ID, FindMaster_GOOGLE_SERVER_FILE)
        return self.firebase_find_master

    def get_firebase_find_it(self):
        if self.firebase_find_it is None:
            self.firebase_find_it = FirebaseHelper(FindIt_PROJECT_ID, FindIt_GOOGLE_SERVER_FILE)
        return self.firebase_find_it


class FirebaseHelper:
    def __init__(self, project_id, google_service_file):
        print(f'init--{project_id}')
        self.firebase_app = None
        self.storage_instance = None
        self.config = {}
        self.config["project_id"] = project_id
        self.GOOGLE_SERVER_FILE = google_service_file
        self.REMOTE_CONFIG_FILE = get_remote_config_file(project_id)
        self.REMOTE_CONFIG_URL = get_remote_config_url(project_id)
        self.init()

    def init(self):
        self.init_firebase(self.config["project_id"])
        self.init_storage(self.config["project_id"])

    def init_firebase(self, project_id):
        storage_bucket = "gs://" + project_id + ".appspot.com"
        databaseURL = "https://" + project_id + ".firebaseio.com/"
        cred = credentials.Certificate(self.GOOGLE_SERVER_FILE)
        self.firebase_app = firebase_admin.initialize_app(cred, {
            "databaseURL": databaseURL,
            "storageBucket": storage_bucket
        }, name=project_id)

        self.refFirestore = firestore.client(app=self.firebase_app)
        print("初始 firebase 成功")

    def init_storage(self, project_id):
        bucket = project_id + ".appspot.com"
        if self.firebase_app:
            self.storage_instance = storage.bucket(name=bucket, app=self.firebase_app)
        print("初始 storage 成功")

    def get_files(self, prefix=""):
        print("prefix = " + prefix)
        blobs = self.storage_instance.list_blobs(prefix=prefix)
        return blobs

    def get_files_match(self, prefix, match_glob):
        blobs = self.storage_instance.list_blobs(prefix=prefix, match_glob=match_glob)
        return blobs

    def get_files_all_versions(self, prefix=""):
        print("prefix = " + prefix)
        blobs = self.storage_instance.list_blobs(prefix=prefix, versions=True)
        return blobs

    def get_file(self, storage_file, generation=None):
        blob = self.storage_instance.get_blob(storage_file, generation=generation)
        return blob

    def get_file_generation(self, storage_file):
        blob = self.storage_instance.get_blob(storage_file)
        if blob is None:
            return None
        else:
            return blob.generation

    # Upload a single bundle file; skips the upload when hash and md5 already match the stored blob.
    def upload_single_bundle(self, local_file, storage_file, blob_dic, ext_meta={}, try_count=0):
        try_count = try_count + 1
        if try_count > 3:
            return False
        try:
            blob = None
            if storage_file in blob_dic:
                blob = blob_dic[storage_file]

            is_same = False
            if blob is not None:
                local_hash = utils.calc_hash(local_file)
                local_md5 = utils.calc_md5(local_file)

                storage_hash = ""
                if blob.metadata is not None and 'hash' in blob.metadata:
                    storage_hash = blob.metadata['hash']

                storage_md5 = ""
                if blob.metadata is not None and 'md5' in blob.metadata:
                    storage_md5 = blob.metadata['md5']

                is_same = storage_hash == local_hash and storage_md5 == local_md5

            if not is_same:
                print(f"{local_file} 上传中..")
                self.upload_file(local_file, storage_file, ext_meta)
            else:
                print(f"{local_file} hash和md5码和storage文件一致,不重复上传..")
            return True
        except Exception as e:
            print(local_file + " 上传失败,尝试重试,错误信息:" + repr(e))
            return self.upload_single_bundle(local_file, storage_file, blob_dic, ext_meta, try_count)

    def upload_thumbnail_image(self, local_file, storage_file, ext_meta={}, try_count=0):
        try_count = try_count + 1
        if try_count > 3:
            return False
        try:
            self.upload_file(local_file, storage_file, ext_meta)
            return True
        except Exception as e:
            print(local_file + " 上传失败,尝试重试,错误信息:" + repr(e))
            return self.upload_thumbnail_image(local_file, storage_file, ext_meta, try_count)

    def upload_file(self, local_file, storage_file, ext_meta={}):
        try:
            upload_blob = self.storage_instance.blob(storage_file)
            meta = {
                'hash': utils.calc_hash(local_file),
                'md5': utils.calc_md5(local_file),
            }

            meta.update(ext_meta)
            upload_blob.metadata = meta
            if local_file.endswith(".json"):
                utils.gzip_file(local_file)
                upload_blob.content_encoding = "gzip"
                upload_blob.upload_from_filename(local_file + '.gz')
                os.unlink(local_file + '.gz')
            else:
                upload_blob.upload_from_filename(local_file)
            blob = self.storage_instance.get_blob(storage_file)
            print(local_file + " 上传成功 generation = {}".format(blob.generation))
            return blob.generation
        except Exception as e:
            raise Exception(e)

    def update_metadata(self, storage_file, ext_meta={}):
        try:
            blob = self.storage_instance.get_blob(storage_file)
            if blob is not None:
                meta = blob.metadata
                meta.update(ext_meta)
                blob.metadata = meta
                blob.patch()
                print(storage_file + " 更新metadata成功")
            else:
                print(storage_file + " 文件不存在")
        except Exception as e:
            raise Exception(e)

    # region Remote Config updates

    def get_access_token(self):
        file_value = ""
        with open(self.GOOGLE_SERVER_FILE, "r") as f:
            file_value = json.load(f)

        sa_credentials = ServiceAccountCredentials.from_json_keyfile_dict(file_value, [
            "https://www.googleapis.com/auth/firebase.remoteconfig"])
        access_token_info = sa_credentials.get_access_token()
        return access_token_info.access_token

    def get_remote_value(self):
        """
        Fetch the Remote Config template and write it to the local remote_config json file.
        :return: ETag
        """
        try:
            headers = {
                "Authorization": "Bearer " + self.get_access_token()
            }
            resp = requests.get(self.REMOTE_CONFIG_URL, headers=headers)

            if resp.status_code == 200:
                with io.open(self.REMOTE_CONFIG_FILE, "wb") as f:
                    f.write(resp.text.encode("utf-8"))

                print("remote config 写入完成: remote_config.json")
                print("ETag from server: {}".format(resp.headers["ETag"]))
                return resp.headers["ETag"]
            else:
                print("remote_config.json上传失败")
                print(resp.text)
                return None
        except Exception as e:
            print("获取 RemoteConfig值失败 " + repr(e))
            raise Exception("Fail")

    def upload_remote_config_value(self, etag):
        """
        Push the local remote_config json back to the Firebase console.
        :param etag: ETag returned by get_remote_value
        """
        try:
            with open(self.REMOTE_CONFIG_FILE, "r", encoding="utf-8") as f:
                content = f.read()
            print("开始上传 remote config:>" + content + "<")
            headers = {
                "Authorization": "Bearer " + self.get_access_token(),
                "Content-Type": "application/json; UTF-8",
                "If-Match": etag
            }
            resp = requests.put(self.REMOTE_CONFIG_URL, data=content.encode("utf-8"), headers=headers)
            if resp.status_code == 200:
                print("推送成功")
                print("ETag from server: {}".format(resp.headers["ETag"]))
                return True
            else:
                print("推送失败")
                print(resp.text)
                return False
        except Exception as e:
            print("更新 RemoteConfig值失败 " + repr(e))
            return False

    def check_or_create_string_field(self, json_dict, keys, value):
        arr = keys.split("/")
        field = json_dict
        index = 0
        for item in arr:
            if index == len(arr) - 1:
                print("item = " + item)
                field[item] = value
                print(field[item])
            else:
                field = field[item]

            index = index + 1

    def check_or_create_json_value(self, json_dict, keys, value):
        arr = keys.split("/")
        field = json_dict
        index = 0
        for item in arr:
            if index == len(arr) - 1:
                print("item = " + item)
                field[item] = json.dumps(value)
                print(field[item])
            else:
                field = field[item]

            index = index + 1

    def check_or_create_json_field_value(self, json_dict, keys, sub_key, sub_value):
        arr = keys.split("/")
        field = json_dict
        index = 0
        for item in arr:
            if index == len(arr) - 1:
                print("item = " + item)
                value = json.loads(field[item])
                value[sub_key] = sub_value
                field[item] = json.dumps(value)
                print(field[item])
            else:
                field = field[item]

            index = index + 1

    def update_remote_config_json_value(self, group, condition, remote_key, value, is_upload=False):
        try:
            etag = None
            if is_upload or not os.path.exists(self.REMOTE_CONFIG_FILE):
                etag = self.get_remote_value()

            remote_content = ""
            with open(self.REMOTE_CONFIG_FILE, "r") as f:
                remote_content = f.read()

            if remote_content is not None and remote_content != "":
                remote_content_json = json.loads(remote_content)
                keys = ""
                if group is not None and group != "":
                    keys = f"parameterGroups/{group}/parameters/"
                else:
                    keys = f"parameters/"

                if condition is not None and condition != "":
                    keys = keys + f"{remote_key}/conditionalValues/{condition}/value"
                else:
                    keys = keys + f"{remote_key}/defaultValue/value"

                self.check_or_create_json_value(remote_content_json, keys, value)

                print("\n\n")
                print(remote_content_json)
                print("\n\n")
                # Write the updated template back to the local remote_config json
                utils.write_json(self.REMOTE_CONFIG_FILE, remote_content_json)
                if is_upload:
                    self.upload_remote_config_value(etag)
        except Exception as e:
            print(e)
            raise Exception(f"[remote_config group:{group}, condition:{condition}, remote_key:{remote_key}] 更新失败")

    def update_remote_config_json_field_value(self, group, condition, remote_key, json_field_key, value, is_upload=False):
        try:
            etag = None
            if is_upload or not os.path.exists(self.REMOTE_CONFIG_FILE):
                etag = self.get_remote_value()

            remote_content = ""
            with open(self.REMOTE_CONFIG_FILE, "r") as f:
                remote_content = f.read()

            if remote_content is not None and remote_content != "":
                remote_content_json = json.loads(remote_content)
                keys = ""
                if group is not None and group != "":
                    keys = f"parameterGroups/{group}/parameters/"
                else:
                    keys = f"parameters/"

                if condition is not None and condition != "":
                    keys = keys + f"{remote_key}/conditionalValues/{condition}/value"
                else:
                    keys = keys + f"{remote_key}/defaultValue/value"

                self.check_or_create_json_field_value(remote_content_json, keys, json_field_key, value)

                # Write the updated template back to the local remote_config json
                utils.write_json(self.REMOTE_CONFIG_FILE, remote_content_json)
                if is_upload:
                    self.upload_remote_config_value(etag)
        except Exception as e:
            print(e)
            raise Exception(f"[remote_config group:{group}, condition:{condition}, remote_key:{remote_key}, json_field_key:{json_field_key}] 更新失败")

    def update_remote_config_string_value(self, group, condition, remote_key, value, is_upload=False):
        try:
            etag = None
            if is_upload or not os.path.exists(self.REMOTE_CONFIG_FILE):
                etag = self.get_remote_value()

            remote_content = ""
            with open(self.REMOTE_CONFIG_FILE, "r") as f:
                remote_content = f.read()

            if remote_content is not None and remote_content != "":
                remote_content_json = json.loads(remote_content)
                keys = ""
                if group is not None and group != "":
                    keys = f"parameterGroups/{group}/parameters/"
                else:
                    keys = f"parameters/"

                if condition is not None and condition != "":
                    keys = keys + f"{remote_key}/conditionalValues/{condition}/value"
                else:
                    keys = keys + f"{remote_key}/defaultValue/value"

                self.check_or_create_string_field(remote_content_json, keys, value)

                print("\n\n")
                print(remote_content_json)
                print("\n\n")
                # Write the updated template back to the local remote_config json
                utils.write_json(self.REMOTE_CONFIG_FILE, remote_content_json)
                if is_upload:
                    self.upload_remote_config_value(etag)
        except Exception as e:
            print(e)
            raise Exception(f"[remote_config group:{group}, condition:{condition}, remote_key:{remote_key}] 更新失败")

    def GetInfor(self, targetFile: str) -> google.cloud.storage.Blob:
        blob = self.storage_instance.get_blob(targetFile)
        return blob

    def GetGeneration(self, targetFile: str) -> str:
        return str(self.GetInfor(targetFile).generation)

File diff suppressed because one or more lines are too long
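Note: a hedged sketch of the FirebaseHelper flow above, not part of the commit; the storage path and Remote Config key are placeholder values.

from firebase_tools.firebase_helper import FirebaseHelperInstance

helper = FirebaseHelperInstance().get_firebase_dof()
# upload_file gzips *.json before uploading and returns the blob generation
generation = helper.upload_file('temp_config/level-release.json', 'configs/level-release.json')
cdn_url = f'https://cdn3-dof.fungame.cloud/configs/level-release.json?generation={generation}'
# Writes the value under parameters/<key>/defaultValue/value and pushes the template when is_upload=True
helper.update_remote_config_json_value(None, None, 'level_config_url', cdn_url, is_upload=True)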
@@ -0,0 +1,96 @@
#!/usr/bin/env python
# coding:utf-8
import gspread
from gspread import Worksheet
from oauth2client.service_account import ServiceAccountCredentials

GoogleDrive = {
"type": "service_account",
"project_id": "quickstart-1616645350080",
"private_key_id": "c27137e7a9bcb56debff4266d7f5e0b75d84e514",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQClKVOarC1QskWD\nHxoS/S/Gah0oge71Fd4jDLOAU7cqOWY5/gPzHysBKCoA+Hrq/LE/SzzwFnqfoZTJ\n7RsokQB07iRyipjv6cm1tfZvguA95LVEAgAcpbsIYa/3gDeZ1QvP6ntTlUMCUdza\nP4l/hl3U9ZNqLAa3axKGxeJug6845BgG3fe1gF12ukyqHecUO9Ys5lUB5HsGv32c\nsIezTPFnNRmXyTSPTC4ufeU8tUhF6D+B5/jnSk+Lt7qjHzVBhyHnx7Aa1hYI2KoX\nn5Pp2hD8vaJR6FxMN1S3YP1DQnsjavU/p/wkG0cca/MkVjDc4Wst+00KDmDQb/sY\ndCBw/5RNAgMBAAECggEADFPC0TMTqMe9h3VdUViDZdhKt9jG2JUawNv0U9orvCOP\nnTl32wATCiGQzQS+y+YzZol7kWHkIiEBxXaEhekYsyGJJ+FvW8zDyOO9coI+sW/u\nFZbeokS+ang8FYmE3N75ZDnYnZrw5u3sQX/nh9SkET6JE64YjD0aI2QGq//5JpJF\nBOWFcmRq+oC4b2MmxjbYYfASz9SioyGPmDM9pnj0qAsooh8lbNYG2ot+SXUmQ5G7\n9jcAXAQA/qhboPEWWZr2bRSHum1s7Tnp0nuVR6gPQ48THCyp+ozyZ51ggg7a+KIY\nqWK59VdzZHKadrMqrANDkZyFk8g4WEYOFbcoroOEsQKBgQDOKIVig9m66BoD8l/u\nuEVnPLueOabKdsavwOcTRhIAOmXYiA0Tjntt2UgFMO+yi4LZujRnB0SHv/j6+UPV\nM6+AOdMUOThto9HhWB2CWg+F1U4eYD064GPllC3uiL/hNAy5OdNrtseR6tlQAG+f\nbARm1GqFk9vVHVyTlpWAULfPxQKBgQDNF3EwUkoPV0wALMKX6V4NjYQx6CsjbN7V\ndbBKBgt+OEEt/yE9L9dwuf2mN7YA37zZUFNfeZUnTNAH2EsmInI6wlewvUK7b9eZ\n3FmZcFHV4hlqE8A2vJwk0U42nJuC9dP5bTL8ZFst9qtRdFwlnrZ3rZXzVEtmBKVt\npIDLAZMy6QKBgQCUx/ejZmZ/FjyYNpZ3UPN9kv4QLakqmte/RWc+qKYbFgokX+OY\nBo8bcuEgJfYHE9omSdTBuiQCGFCWx8flmPyCfLjR8o2/yqeQiqgZR+fF/W/4ShpG\nYGSX7f3MFVLtM0QvdQUYynty2ltk+juUgT8X+xq5NkFDp4IFXXqddSOCxQKBgBa4\nLhXIR+QDK6wpSTVC6ORfdPGCYqT9/oFvFCRfHw7QdIf/51K75gXa1LqBGWxnXKhG\nObYt5dQAslrsHwcOcdEIjmZJ0QaqkRu+ST6yLp6e+WnC3lwx8KozdZKfLqsHSIAt\nFKTZCTDCTqArX7nbJyOC20WlZOTcRucqfgn/FqthAoGAe7aZTcUxLbtltXki9nym\nOWQ5WTbwSJukzG8BWtG6vuvJf+tyUvEcMXw5cnrem58FrDUqt1NAS+jso1d+aB9n\n+DulSAgaec/8kYcAnx2EnDQlXceB94FOsjfKD/j2T7ONteieGLYleliXVdhWu/ay\nTIljZsJu1Lnnq49cS0TLgIU=\n-----END PRIVATE KEY-----\n",
"client_email": "google-sheet@quickstart-1616645350080.iam.gserviceaccount.com",
"client_id": "105134275998904574352",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-sheet%40quickstart-1616645350080.iam.gserviceaccount.com"
}

google_drive_auth_scope = [
    'https://www.googleapis.com/auth/drive',
    'https://www.googleapis.com/auth/drive.file'
]


def singleton(cls):
    _instance = {}

    def inner():
        if cls not in _instance:
            _instance[cls] = cls()
        return _instance[cls]
    return inner


@singleton
class GoogleSheetHelper:
    def __init__(self):
        cred = ServiceAccountCredentials.from_json_keyfile_dict(GoogleDrive, google_drive_auth_scope)
        self.googleDriveClient = gspread.authorize(cred)
        self.sheet_dict = {}

    def open_sheet_file(self, sheet_file_name):
        if sheet_file_name in self.sheet_dict:
            return self.sheet_dict[sheet_file_name]
        else:
            spread_sheet = self.googleDriveClient.open(sheet_file_name)
            self.sheet_dict[sheet_file_name] = spread_sheet
            return spread_sheet

    def get_sheet_table(self, sheet_file_name, sheet_table_name) -> Worksheet:
        try:
            spread_sheet = self.open_sheet_file(sheet_file_name)
            return spread_sheet.worksheet(sheet_table_name)
        except Exception as e:
            print(e)
            return None

    def get_or_create_sheet_table(self, sheet_file_name, sheet_table_name, row_count=1, col_count=1) -> Worksheet:
        spread_sheet = self.open_sheet_file(sheet_file_name)
        try:
            return spread_sheet.worksheet(sheet_table_name), False
        except Exception:
            return spread_sheet.add_worksheet(sheet_table_name, row_count, col_count), True

    def get_sheet_row(self, sheet: Worksheet, row):
        if sheet is None:
            print('传入参数sheet异常 sheet is None')
            return
        if row < 0:
            print(f'传入参数row异常 row={row}')
            return
        return sheet.row_values(row)

    def sheet_append_row(self, sheet: Worksheet, value):
        if sheet is None:
            print('传入参数sheet is None')
            return
        sheet.append_row(value, value_input_option='USER_ENTERED')

    def sheet_update_cells_value(self, sheet: Worksheet, cell_range, values):
        if sheet is None:
            print('传入参数sheet is None')
            return
        for i, cell in enumerate(cell_range):
            cell.value = values[i]
        sheet.update_cells(cell_range)

    def sheet_update_cell_value(self, sheet: Worksheet, row, col, value):
        if sheet is None:
            print('传入参数sheet is None')
            return
        sheet.update_cell(row, col, value)


if __name__ == '__main__':
    pass
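Note: a hedged sketch of reading a worksheet with the helper above, not part of the commit; 'LevelConfig' and 'Sheet1' are placeholder names.

from google_drive.google_sheet import GoogleSheetHelper

sheet_helper = GoogleSheetHelper()                  # @singleton: repeated calls return the same instance
table = sheet_helper.get_sheet_table('LevelConfig', 'Sheet1')
if table is not None:
    rows = table.get_all_values(major_dimension='ROWS')   # the same call ConfigGenerator relies on
    print(f'{len(rows)} rows loaded')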
@@ -0,0 +1,56 @@
#!/usr/bin/env python
# coding:utf-8

import requests
import json
import urllib3


class clear_cdn_util:
    def __init__(self, x_app_id):
        self.url = "https://saas.castbox.fm/tool/api/v1/operate/batch/clearCdn"
        self.x_app_id = x_app_id
        self.domain_list = []
        self.clear_file_list = []

    # domain: cdn3-find-master.fungame.cloud
    def appendDomain(self, domain):
        self.domain_list.append(domain)

    # storage path: /xx/xx
    def appendClearFile(self, path):
        self.clear_file_list.append(path)

    def clearCDN(self):
        try:
            if self.x_app_id == "":
                print("clearCDN failed!!! x_app_id is None")
                return

            if len(self.domain_list) == 0:
                print("clearCDN failed!!! domain_list len is 0")
                return

            if len(self.clear_file_list) == 0:
                print("clearCDN failed!!! clear_file_list len is 0")
                return

            params = {
                "domainList": self.domain_list,
                "pathSuffixList": self.clear_file_list
            }
            data = json.dumps(params)
            print(data)
            urllib3.disable_warnings()
            r = requests.post(self.url,
                              headers={"X-APP-ID": self.x_app_id, "Content-Type": "application/json"},
                              data=data,
                              verify=False
                              )

            print(r.status_code)
            print(str(r))
            if r.status_code == 200:
                print(f"{str(self.clear_file_list)} clearCDN OK!!! ")
        except Exception as e:
            print("clearCDN failed!!! error: " + repr(e))
@@ -0,0 +1,30 @@
#!/usr/bin/python
# coding=utf-8

import requests
import json


def wechat_alert(message, hook_url, at_people_list=[]):
    webhook = hook_url
    header = {
        "Content-Type": "application/json",
        "Charset": "UTF-8"
    }

    print(message)
    msg = {
        "msgtype": "text",
        "text": {
            "content": message,
            "mentioned_mobile_list": at_people_list,
        },
    }
    message_json = json.dumps(msg)
    info = requests.post(url=webhook, data=message_json, headers=header)
    print(info.text)


if __name__ == "__main__":
    wechat_alert("测试机器人消息发送")
@@ -0,0 +1,31 @@
#!/usr/bin/env python
# coding:utf-8

class NotificationHelper:
    def __init__(self, hook_url):
        self.message = '########################################\n'
        self.hook_url = hook_url
        self.at_people_list = []

    def append_at_people(self, people):
        if people not in self.at_people_list:
            self.at_people_list.append(people)

    def get_people_list(self):
        return self.at_people_list

    def clear_msg(self):
        self.message = '########################################\n'

    def append_msg(self, msg):
        print(msg)
        self.message += msg + '\n'

    def append_end_msg(self):
        self.message += '########################################\n'

    def get_msg(self):
        return self.message

    def get_hook_url(self):
        return self.hook_url
@@ -0,0 +1,254 @@
#!/usr/bin/env python
# coding:utf-8
import glob
import hashlib
import json
import os
import sys
import gzip
import shutil
import subprocess
import zipfile

curr_dir = os.path.split(os.path.abspath(__file__))[0]


def write_json_file(filename, json):
    """
    Write JSON content to the given file.
    :param filename: file path
    :param json: JSON content to write
    :return: None
    """
    # Make sure the target directory exists
    directory = os.path.dirname(filename)
    if directory and not os.path.exists(directory):
        os.makedirs(directory)

    with open(filename, "w") as f:
        f.write(json)


def write_json_file_to_folder(folder, filename, json):
    if not os.path.exists(folder):
        os.mkdir(folder)
    f = open(folder + '/' + filename, "w")
    f.write(json)
    f.close()


def gzip_file(_in_file):
    with open(_in_file, 'rb') as f_in:
        with gzip.open(_in_file + '.gz', 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)


def zip_dir(dir_path, out_fullname):
    """
    Zip the given directory.
    :param dir_path: directory to compress
    :param out_fullname: output path of the zip file (xxxx.zip)
    :return: None
    """
    zip_file_obj = zipfile.ZipFile(out_fullname, "w", zipfile.ZIP_DEFLATED)
    for path, dirnames, filenames in os.walk(dir_path):
        # Strip the root so only contents below dir_path are stored in the archive
        fpath = path.replace(dir_path, '')
        for filename in filenames:
            zip_file_obj.write(os.path.join(path, filename), os.path.join(fpath, filename))
    zip_file_obj.close()


def unzip_dir(zip_src, dst_dir):
    """
    Extract a zip archive into the given directory.
    :param zip_src: zip file
    :param dst_dir: destination directory
    :return: None
    """
    r = zipfile.is_zipfile(zip_src)
    if r:
        fz = zipfile.ZipFile(zip_src, 'r')
        for file in fz.namelist():
            fz.extract(file, dst_dir)
    else:
        print('%s This is not zip' % zip_src)


def zip_file(file_path, out_fullname):
    """
    Zip a single file.
    :param file_path: file to compress
    :param out_fullname: output path of the zip file (xxxx.zip)
    :return: None
    """
    file_zip = zipfile.ZipFile(out_fullname, 'w')
    file_zip.write(file_path, compress_type=zipfile.ZIP_DEFLATED)
    file_zip.close()


def unzip_file(zip_file_path, sp_path=""):
    """
    Extract a zip file.
    :param zip_file_path: zip file path
    :param sp_path: target directory (defaults to the parent of the archive)
    :return: None
    """
    file_zip = zipfile.ZipFile(zip_file_path)
    sp_path = sp_path if sp_path != "" else "{}/../".format(os.path.splitext(zip_file_path)[0])
    # Extract everything
    file_zip.extractall(path=sp_path)


def get_file_last_line(f_name):
    """
    :param f_name: path of the text file to read
    :return: the last line of the file
    """
    print(f_name)
    with open(f_name, 'r') as f:
        first_line = f.readline()   # read the first line
        off = -50                   # initial offset from the end of the file
        while True:
            f.seek(off, 2)          # seek `off` characters back from the end of the file
            lines = f.readlines()   # read everything from there
            if len(lines) >= 2:     # at least two lines guarantees the last one is complete
                last_line = lines[-1]
                break
            # If the window only holds one line we cannot be sure it is complete,
            # so double the offset and try again.
            off *= 2

    print('文件' + f_name + '第一行为:' + first_line)
    print('文件' + f_name + '最后一行为:' + last_line)
    return last_line


def open_json(path):
    dic = {}
    with open(path, 'r') as f:
        dic = json.load(f)

    return dic


def write_json(path, content):
    with open(path, 'w') as f:
        json.dump(content, f)


def calc_hash(filepath):
    """
    Build a hash from the file content.
    :param filepath: file path
    :return: hash code
    """
    with open(filepath, 'rb') as f:
        sha1obj = hashlib.sha1()
        sha1obj.update(f.read())
        hash_code = sha1obj.hexdigest()
        return hash_code


def calc_md5(filepath):
    """
    Build an md5 digest from the file content.
    :param filepath: file path
    :return: md5 code
    """
    with open(filepath, 'rb') as f:
        md5obj = hashlib.md5(f.read())
        md5_code = md5obj.hexdigest()
        return md5_code


def delete_files_with_extension(folder_path, extension):
    """
    Delete all files with the given extension inside a folder.
    :param folder_path: folder path
    :param extension: file extension
    :return: None
    """
    # Collect the matching files
    files = glob.glob(os.path.join(folder_path, f"*.{extension}"))
    # Delete them one by one
    for file_path in files:
        os.remove(file_path)


def run_cmd(str_cmd, log_path=""):
    if len(log_path) > 1:
        logfile = open(log_path, "a")
        logfile.writelines("-----------------------------")
        logfile.writelines(str(str_cmd))

    process = subprocess.Popen(
        str_cmd, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    print(str_cmd)
    lines_out = process.stdout.readlines()
    for line in lines_out:
        print(line)
        if len(log_path) > 1:
            logfile.writelines(str(line))

    lines_error = process.stderr.readlines()
    if len(log_path) > 1 and len(lines_error) > 0:
        logfile.writelines("has error:\n\n")
    for line in lines_error:
        print(line)
        if len(log_path) > 1:
            logfile.writelines(str(line))

    print("end: " + str_cmd)
    if len(log_path) > 1:
        logfile.writelines("end: " + str_cmd)
        logfile.close()

    return lines_out, lines_error


def mkdirs(dir_path: str):
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)


def clear_dirs(dir_path: str):
    if os.path.exists(dir_path):
        shutil.rmtree(dir_path)
    os.makedirs(dir_path)


def copy_dir(src_dir, dst_dir):
    if not os.path.exists(src_dir):
        return
    if not os.path.exists(dst_dir):
        os.makedirs(dst_dir)
    for item in os.listdir(src_dir):
        src_item = os.path.join(src_dir, item)
        dst_item = os.path.join(dst_dir, item)
        if os.path.isdir(src_item):
            copy_dir(src_item, dst_item)
        else:
            shutil.copyfile(src_item, dst_item)


def copy_file(src_thum_file, dst_thum_file):
    if not os.path.exists(src_thum_file):
        return
    shutil.copyfile(src_thum_file, dst_thum_file)


def get_bundle_file_path(bundle_dir_path):
    """
    Look for a .bundle file directly inside the given directory (subdirectories are not searched).
    :param bundle_dir_path: directory to search
    :return: path of the .bundle file, or None when nothing is found
    """
    if not os.path.exists(bundle_dir_path):
        return None

    for file in os.listdir(bundle_dir_path):
        file_path = os.path.join(bundle_dir_path, file)
        if os.path.isfile(file_path) and file.endswith('.bundle'):
            return file_path

    return None