zhangshenhao 7 months ago
commit
fa57abcb99
41 changed files with 7693 additions and 0 deletions
  1. 16 0
      .gitignore
  2. 239 0
      _model/_base.py
  3. 359 0
      _model/_update_utils.py
  4. 295 0
      _opt/algorithm/_utils/config_info.py
  5. 287 0
      _opt/algorithm/_utils/data_cleaner.py
  6. 671 0
      _opt/algorithm/_utils/data_service.py
  7. 11 0
      _opt/algorithm/_utils/data_summary.py
  8. 190 0
      _opt/algorithm/_utils/datetime_func.py
  9. 229 0
      _opt/algorithm/_utils/point_io.py
  10. 185 0
      _opt/algorithm/_utils/point_reader.py
  11. 46 0
      _opt/algorithm/_utils/wf_cache.py
  12. 134 0
      _opt/algorithm/_utils/wf_file_io.py
  13. 67 0
      _opt/algorithm/constrains/build.py
  14. 51 0
      _opt/algorithm/constrains/execute.py
  15. 98 0
      _opt/algorithm/constrains/parse.py
  16. 256 0
      _opt/algorithm/main.py
  17. 50 0
      _opt/algorithm/model/diagnosis.py
  18. 145 0
      _opt/algorithm/model/model.py
  19. 234 0
      _opt/algorithm/opt_alg.py
  20. 265 0
      _opt/algorithm/opt_obj.py
  21. 87 0
      _opt/algorithm/sim_config.py
  22. 226 0
      _opt/boundary/_utils/config_info.py
  23. 166 0
      _opt/boundary/_utils/data_cleaner.py
  24. 671 0
      _opt/boundary/_utils/data_service.py
  25. 11 0
      _opt/boundary/_utils/data_summary.py
  26. 190 0
      _opt/boundary/_utils/datetime_func.py
  27. 136 0
      _opt/boundary/_utils/point_reader.py
  28. 46 0
      _opt/boundary/_utils/wf_cache.py
  29. 182 0
      _opt/boundary/main.py
  30. 32 0
      _opt/boundary/sim_config.py
  31. 39 0
      components/_base_components.py
  32. 225 0
      components/coil.py
  33. 24 0
      components/mixed.py
  34. 234 0
      components/wheel.py
  35. 110 0
      doc/整体框架.drawio
  36. 162 0
      main.py
  37. 169 0
      main2.py
  38. 255 0
      model/DHU_1.py
  39. 356 0
      model/DHU_2.py
  40. 422 0
      model/DHU_3.py
  41. 122 0
      tools/enthalpy.py

+ 16 - 0
.gitignore

@@ -0,0 +1,16 @@
+.Rproj.user
+.Rhistory
+.RData
+.Ruserdata
+
+*.pkl
+*.pyc
+*.json
+*.pkl
+*.csv
+*.xls
+*.xlsx
+*.png
+*.docx
+*.exe
+*.o

+ 239 - 0
_model/_base.py

@@ -0,0 +1,239 @@
+import importlib
+import pickle
+from typing import Union
+from datetime import datetime
+
+import numpy as np
+import pandas as pd
+from sklearn.metrics import (
+    r2_score,
+    mean_absolute_error,
+    mean_absolute_percentage_error
+)
+
+try:
+    import plotnine as gg
+except:
+    pass
+
+
class BaseModel:
    """Base class for platform models.

    All persistent state lives in ``self.model_info`` (a plain dict), so
    saving/loading reduces to pickling that dict.  Sub-models are stored
    under ``model_<name>`` keys, with training metadata under
    ``model_train_info_<name>``.
    """

    def __init__(self) -> None:
        # y_true / y_pred of the most recent fit; consumed by last_metric().
        self.last_fit_y_true: Union[np.ndarray, None] = None
        self.last_fit_y_pred: Union[np.ndarray, None] = None
        self.model_info = {'LOAD_INFO': {}}

    def record_load_info(self, **info):
        """Store constructor kwargs both as attributes and under LOAD_INFO so
        load()/load_from_platform() can re-create the instance later."""
        for attr_name, attr_value in info.items():
            setattr(self, attr_name, attr_value)
        self.model_info['LOAD_INFO'].update(info)

    def record_model(
        self,
        model_name  : str,
        model       : dict,
        train_data  : dict,
        train_metric: dict,
        keep_raw    : bool = False
    ) -> None:
        """Register a trained sub-model together with its training summary.

        Parameters
        ----------
        model_name : str
            Identifier of the sub-model.
        model : dict
            Fitted model object to store.
        train_data : dict
            name -> array used for training; ``None`` entries are skipped.
            Min/max (rounded to 2 decimals) are always recorded.
        train_metric : dict
            Metric name -> value of the training run.
        keep_raw : bool
            When True, also keep the raw arrays under ``<name>_raw``.
        """
        train_info = {
            'datetime': datetime.now().strftime('%Y/%m/%d %H:%M:%S'),
            'metric'  : train_metric,
        }
        for name, value in train_data.items():
            if value is None:
                continue
            if keep_raw:
                train_info[f'{name}_raw'] = value
            train_info[f'{name}_max'] = round(np.max(value), 2)
            train_info[f'{name}_min'] = round(np.min(value), 2)
        self.model_info[f'model_{model_name}'] = model
        self.model_info[f'model_train_info_{model_name}'] = train_info

    def _get_train_data(self, model_name) -> dict:
        """Return the raw training arrays saved via record_model(keep_raw=True)."""
        if not self.is_model_exist(model_name):
            return {}
        raw_train_data = {}
        train_data_info = self.model_info[f'model_train_info_{model_name}']
        for name, value in train_data_info.items():
            if value is None:
                continue
            if name.endswith('_raw'):
                # BUG FIX: str.strip('_raw') removes any of the characters
                # '_', 'r', 'a', 'w' from BOTH ends (e.g. 'war_raw' -> ''),
                # so cut off the literal suffix instead.
                raw_train_data[name[:-len('_raw')]] = value
        return raw_train_data

    @property
    def all_model_names(self):
        """Names of all registered sub-models (keys shaped ``model_<name>``)."""
        names = []
        for key in self.model_info:
            if ('_train_info_' not in key) and ('model_' in key):
                names.append(key.replace('model_', ''))
        return names

    def is_model_exist(self, model_name) -> bool:
        """True when a sub-model with this name has been registered."""
        return model_name in self.all_model_names

    def is_model_train_data_exist(self, model_name, data_name) -> bool:
        """True when statistics for `data_name` were recorded for `model_name`."""
        if not self.is_model_exist(model_name):
            return False
        return f'{data_name}_max' in self.model_info[f'model_train_info_{model_name}']

    def save(self, path):
        """Pickle model_info to `path`."""
        pd.to_pickle(self.model_info, path)

    @classmethod
    def load(cls, path):
        """Re-create an instance from a pickle produced by save()."""
        model_info = pd.read_pickle(path)
        load_info  = model_info.get('LOAD_INFO', {})
        model      = cls(**load_info)
        model.model_info = model_info
        return model

    def save_to_platform(
        self,
        version_id     : int,
        model_id       : str,
        update_method  : str,
        model_info     : dict,
        MODEL_FILE_PATH: str,
        MODEL_FUNC_PATH: str,
    ) -> None:
        """Save the model to disk and push it to the model platform.

        Parameters
        ----------
        model_info : dict
            Per-sub-model platform configuration::

                {
                    NAME: {
                        'point_id'   : ...,
                        'point_name' : ...,
                        'point_class': ...,
                        'thre_mae'   : ...,
                        'thre_mape'  : ...,
                        'thre_days'  : ...,
                    }
                }

        Sub-models without an entry in `model_info` are skipped.
        """
        self.save(MODEL_FILE_PATH)

        model_update_info = {}
        for model_name in self.all_model_names:
            if model_name not in model_info:
                continue
            train_metric = self.model_info[f'model_train_info_{model_name}']['metric']
            model_update_info[model_name] = {
                'metric' : {
                    'MAE' : train_metric['MAE'],
                    'MAPE': train_metric['MAPE'],
                },
                'point_id'   : model_info[model_name]['point_id'],
                'point_name' : model_info[model_name]['point_name'],
                'point_class': model_info[model_name]['point_class'],
                'thre_mae'   : model_info[model_name]['thre_mae'],
                'thre_mape'  : model_info[model_name]['thre_mape'],
                'thre_days'  : model_info[model_name]['thre_days'],
            }
        # Imported lazily so loading this module never requires the platform libs.
        from ._update_utils import update
        update(
            version_id      = version_id,
            model_id        = model_id,
            model_info      = model_update_info,
            update_method   = update_method,
            MODEL_FUNC_PATH = MODEL_FUNC_PATH,
            MODEL_FILE_PATH = MODEL_FILE_PATH
        )

    @classmethod
    def load_from_platform(
        cls,
        reload   = False,
        source   = 'file', # file / id
        model_id = None,
    ):
        """Load a model from the packaged model file or by platform model id.

        Parameters
        ----------
        reload : bool
            When True and source='file', reload the model module first.
        source : str
            'file' -> import from the managed models package;
            'id'   -> fetch 'model.pkl' via the platform API.
        model_id : str
            Required when source='id'.
        """
        # 设备模型组件对应模型管理中的文件
        if source == 'file':
            MODEL_PACKAGE = importlib.import_module(
                '..models.model_func', package='.'.join(__name__.split('.')[:-1]))
            if reload:
                importlib.reload(MODEL_PACKAGE)
            model_info = getattr(MODEL_PACKAGE, 'model')

        # 通过模型的id获取到模型文件
        elif source == 'id':
            if model_id is None:
                raise Exception('必须输入模型的id')
            from workflow_utils import get_model_version_file
            try:
                model_info = get_model_version_file(model_id=model_id, filename='model.pkl')
                # NOTE(review): pickle.loads on platform-served bytes — trusted
                # source assumed; never feed untrusted data here.
                model_info = pickle.loads(model_info)
            except Exception as e:
                print(e)
                raise Exception('模型文件获取失败')

        else:
            # BUG FIX: an unknown source previously fell through and raised an
            # opaque NameError on model_info below.
            raise Exception(f"source 参数有误:{source}")

        load_info = model_info.get('LOAD_INFO', {})
        model = cls(**load_info)
        model.model_info = model_info
        return model

    def metric(self, y_true, y_pred, show=True):
        """Compute R2 / MAE / MAPE over rows where both arrays are non-NaN.

        Returns {'R2': ..., 'MAE': ..., 'MAPE': ...}; prints them when `show`.
        """
        mask   = ~(np.isnan(y_true) | np.isnan(y_pred))
        y_true = y_true[mask]
        y_pred = y_pred[mask]
        r2     = r2_score(y_true, y_pred)
        mae    = mean_absolute_error(y_true, y_pred)
        mape   = mean_absolute_percentage_error(y_true, y_pred)
        if show:
            print(f'R2\t: {r2}\nMAE\t: {mae} \nMAPE\t: {mape}')
        return {'R2': r2, 'MAE': mae, 'MAPE': mape}

    def last_metric(self):
        """Metrics of the most recent fit (requires last_fit_y_* to be set)."""
        return self.metric(y_true=self.last_fit_y_true, y_pred=self.last_fit_y_pred)

    def summary(self):
        """Placeholder for subclasses to print a model summary."""
        ...

    def plot_TVP(self):
        """True-vs-predicted scatter of the last fit with the y=x reference line."""
        plot = (
            pd.DataFrame(
                {
                    'Real'   : self.last_fit_y_true.flatten(),
                    'Predict': self.last_fit_y_pred.flatten()
                }
            )
            .pipe(gg.ggplot)
            + gg.aes(x='Real', y='Predict')
            + gg.geom_point()
            + gg.geom_abline(slope=1, intercept=0, color='red')
        )
        return plot
+
+
def plot_contour(data, x, y, z, labs=None):
    """Draw a filled tile plot of `z` over (`x`, `y`) and overlay labelled
    contour lines on the resulting matplotlib figure.

    Parameters
    ----------
    data : DataFrame with columns `x`, `y`, `z` (one row per grid cell).
    labs : optional dict forwarded to gg.labs().

    Returns
    -------
    matplotlib Figure
    """
    pivot = (
        data.pivot(index=x, columns=y, values=z)
        .sort_index(axis=1, ascending=False)
        .sort_index(axis=0, ascending=False)
    )
    n_rows = len(pivot.index)
    n_cols = len(pivot.columns)
    # Expand the pivot axes into full coordinate grids for ax.contour.
    X = np.repeat(pivot.index.values.reshape(-1, 1), n_cols, axis=1)
    Y = np.repeat(pivot.columns.values.reshape(1, -1), n_rows, axis=0)
    Z = pivot.values

    base = (
        gg.ggplot(data)
        + gg.aes(x=x, y=y, fill=z)
        + gg.geom_tile()
        + gg.coord_cartesian(expand=False)
        + gg.theme(legend_position='none')
        + gg.labs(**({} if labs is None else labs))
    )
    fig = base.draw()
    ax = fig.get_axes()[0]
    contour = ax.contour(X, Y, Z, levels=10, colors='black', linewidths=1)
    ax.clabel(contour, inline=True, fontsize=8)
    return fig

+ 359 - 0
_model/_update_utils.py

@@ -0,0 +1,359 @@
+import json
+from datetime import datetime, timedelta
+
+from workflowlib import requests
+from workflow_utils import update_model_version_v3
+
+
def update(
    version_id     : int,
    model_id       : str,
    model_info     : dict,
    update_method  : str,
    MODEL_FUNC_PATH: str,
    MODEL_FILE_PATH: str
):
    """Register model points and (conditionally) upload a new model version.

    :param version_id: id of the new model version to create.
    :param model_id: platform id of the model being updated.
    :param model_info: per-target configuration, e.g.::

        {
            'E': {
                'metric'     : {'MAE': ..., 'MAPE': ...},
                'point_id'   : 'abc',
                'point_name' : 'abc',
                'point_class': 'abc',
                'thre_mae'   : '123',
                'thre_mape'  : '123',
                'thre_days'  : '123',
            },
            'AP': {...},
        }

    :param update_method: 'update' always uploads; 'any_metric' uploads only
        when a monitored metric exceeded its threshold within the configured
        look-back window.
    :param MODEL_FUNC_PATH: path of the model function file to upload.
    :param MODEL_FILE_PATH: path of the model pickle file to upload.
    :raises ValueError: on an unknown update_method.
    """
    factors = ['MAE', 'MAPE']  # monitored metric names

    # Register the points on the model management page.
    points_set_dict = {
        key: {
            'point_id'   : value['point_id'],
            'point_name' : value['point_name'],
            'point_class': value['point_class'],
        }
        for key, value in model_info.items()
    }
    update_model_points(points_set_dict, model_id)

    # Fetch model details; the device name is used in the update log.
    model_info_res = get_model_info(model_id=model_id)
    device_name = model_info_res['device_name']

    # Training metrics written into the log entry.
    metrics_log = {key: value['metric'] for key, value in model_info.items()}

    # All target point ids.
    point_ids = [value['point_id'] for value in model_info.values()]

    # Per-target point id plus metric thresholds.
    points_metrics = {
        key: {
            'point_id': value['point_id'],
            'MAE'     : value['thre_mae'],
            'MAPE'    : value['thre_mape'],
        }
        for key, value in model_info.items()
    }
    metric_json = get_metrics_json(points_metrics)

    # Configured look-back window (days) per target, e.g. {'E': '123'}.
    thre_days_dict = {key: value['thre_days'] for key, value in model_info.items()}

    if update_method == 'any_metric':
        print("【更新模式:基于监控指标进行更新】")

        # Read monitor metrics against the first listed (existing) version;
        # fall back to the new version id when no old version exists.
        all_old_version_id = model_info_res['version_list']
        old_version_id = next((_['id'] for _ in all_old_version_id), version_id)

        # RENAMED for clarity: the old flag `all_keys_need_update` was True
        # when NO update was needed, which inverted its name.
        any_threshold_exceeded = False
        for key, days in thre_days_dict.items():
            print("{:=^50s}".format(f"目标变量 {key} "))
            print(f"处理目标变量 {key} 的过去时间:")
            past_times_list = get_past_times(days)

            # Monitor metrics for every (point, factor, day) combination.
            monitor_results = get_monitor_metric(past_times_list, old_version_id, factors, point_ids)

            is_update_needed = check_threshold_update_model(monitor_results, points_metrics[key])
            print(f"目标 {key} 是否需要更新模型:{is_update_needed}")
            if is_update_needed:
                any_threshold_exceeded = True

        if not any_threshold_exceeded:
            print("【所有目标变量的所有指标均未超出阈值,不需要更新模型】")
            return None
        print("【存在目标变量指标超出阈值,需要更新模型】")
        version_update(model_id, MODEL_FUNC_PATH, MODEL_FILE_PATH, metric_json,
                       version_id, device_name, metrics_log)
    elif update_method == 'update':
        print("【更新模式:强制更新】")
        version_update(model_id, MODEL_FUNC_PATH, MODEL_FILE_PATH, metric_json,
                       version_id, device_name, metrics_log)
    else:
        # BUG FIX: previously raised a bare ValueError with no message.
        raise ValueError(f'未知的更新模式:{update_method}')
+
+# Fetch the model-version page information for a given model_id
def get_model_info(model_id):
    """Fetch the model's detail record (device name, version list, ...) from
    the backend and return the 'result' payload."""
    url = f"http://m2-backend-svc:8000/api/ai/model/get_details/{model_id}"
    response = requests.get(url=url)
    return response.json()['result']
+
+
+# Compute past timestamps
def get_past_times(thre_days: int) -> list:
    """Return the midnight datetimes of the previous `thre_days` days.

    Args:
        thre_days (int): number of past days to cover (coerced via int()).

    Returns:
        list: datetime objects, most recent day first, each at 00:00:00.
    """
    thre_days = int(thre_days)
    now = datetime.now()
    past_times = [
        (now - timedelta(days=offset)).replace(hour=0, minute=0, second=0, microsecond=0)
        for offset in range(1, thre_days + 1)
    ]
    print(f"根据配置 {thre_days} 天计算的时间:", past_times)
    return past_times
+
+
+# Fetch monitoring metrics
def get_monitor_metric(past_times: list, version_id: str, factors: list, point_ids: list) -> dict:
    """Fetch daily monitor metrics for every (day, point, factor) combination.

    Args:
        past_times (list): datetime objects, one per day to query.
        version_id (str): model version id whose monitor data is read.
        factors (list): metric names, e.g. ['MAE', 'MAPE'].
        point_ids (list): target point ids, e.g. ['_E', '_AP'].

    Returns:
        dict: {point_id: {factor: [one value per day, 0 on missing data or
              any request failure]}}.
    """
    url = "http://m2-backend-svc:8000/api/ai/monitor/get_single_factor_sequence"
    monitor_metric = {}

    for time_point in past_times:
        formatted_time = time_point.strftime("%Y-%m-%d %H:%M:%S")
        for point_id in point_ids:
            point_series = monitor_metric.setdefault(point_id, {})
            for factor in factors:
                values = point_series.setdefault(factor, [])
                payload = {
                    "factor": factor,
                    "point_id": point_id,
                    "time_begin": formatted_time,
                    "time_end": formatted_time,
                    "version_id": version_id,
                    "type": "DAILY"
                }
                try:
                    response = requests.post(url=url, data=json.dumps(payload))
                    response.raise_for_status()  # surface HTTP errors
                    result = response.json().get('results', [])
                    # The target value sits at result[0][1]; pad with 0 when
                    # the backend returned no rows for that day.
                    values.append(result[0][1] if result else 0)
                # BUG FIX: `except (KeyError, IndexError, Exception)` was
                # redundant — Exception already covers the other two.
                except Exception as e:
                    print(f"Error fetching data for {factor}, {point_id}, {formatted_time}: {e}")
                    values.append(0)

    return monitor_metric
+
+
+# Check whether any metric exceeds its threshold
def check_threshold_update_model(monitor_metric, points_metrics):
    """Return True when any monitored factor value exceeds its threshold.

    Args:
        monitor_metric (dict): {point_id: {factor: [values...]}} monitor data.
        points_metrics (dict): e.g. {'point_id': 'abc', 'MAE': '123', 'MAPE': '123'}.

    Returns:
        bool: True if any single value of any factor is above its threshold;
              False otherwise (including when the point has no monitor data).
    """
    point_id = points_metrics['point_id']
    exceeded = {}
    if point_id in monitor_metric:
        print(f"目标点位 {point_id} 的监控指标结果:{monitor_metric[point_id]}")
        for factor, values in monitor_metric[point_id].items():
            # Missing thresholds default to +inf, i.e. never exceeded.
            limit = float(points_metrics.get(factor, float('inf')))
            exceeded[factor] = any(v > limit for v in values if v is not None)
    print("监控模型指标是否超出阈值: ", exceeded)
    return any(exceeded.values())
+
+
def get_metrics_json(points_metrics):
    """Build the backend 'factors' payload from per-target thresholds.

    Only MAE and MAPE carry upper limits (taken from points_metrics); every
    other factor is emitted with all limits set to None.
    """
    factor_names = ["MAE", "MBE", "MSE", "MdAE", "std_MAE", "MAPE", "std_MAPE"]
    metric_json = []

    for metrics in points_metrics.values():
        mae_raw = metrics.get('MAE', None)
        mape_raw = metrics.get('MAPE', None)
        limits = {
            "MAE" : float(mae_raw) if mae_raw is not None else None,
            "MAPE": float(mape_raw) if mape_raw is not None else None,
        }
        factors = [
            {
                "factor": name,
                "lwr_limit": None,
                "upr_limit": limits.get(name),
                "trained_value": None,
            }
            for name in factor_names
        ]
        metric_json.append({"factors": factors, "point_id": metrics.get("point_id")})

    return metric_json
+
+
def version_update(model_id, mod_func_path, mod_file_path, metric_json, new_version_id, device_name, metrics_log):
    """Upload a new model version, then write an operation-log entry.

    :param model_id: platform model id
    :param mod_func_path: path of the model function file
    :param mod_file_path: path of the model pickle file
    :param metric_json: 'factors' payload built by get_metrics_json()
    :param new_version_id: id of the version being created
    :param device_name: device name shown in the log entry
    :param metrics_log: training metrics included in the log entry
    """
    files = [
        {"filename": mod_file_path},
        {"filename": mod_func_path}
    ]
    # Register the new version together with its metric thresholds.
    update_model_version_v3(model_id, new_version_id, files, workflow_id=None, factors=metric_json)

    # Record the update in the system operation log.
    log_text = f"设备名称:{device_name}, 模型文件:{model_id}, 更新后的指标:{metrics_log}"
    requests.post(
        "http://m2-backend-svc:8000/api/ai/sys_opt_log/create_one",
        json={
            "上传日志": "上传日志",
            "user_id": 10,
            "type": "模型自动迭代操作",
            "log": log_text
        }
    )
    return
+
+
def update_model_points(points_set_dict, model_id):
    """Sync configured points into the model's device point list on the platform.

    points_set_dict: {name: {'point_id', 'point_class', 'point_name'}} point info
    model_id:        platform model id

    Fetches the model details, merges the points into device_data.point_list
    (matching existing entries by point_class), then posts the modified record
    back.  All failures are printed, never raised.
    """
    url = f"http://m2-backend-svc:8000/api/ai/model/get_details/{model_id}"
    update_url = f"http://m2-backend-svc:8000/api/ai/model/update_info/{model_id}"

    try:
        print("{:=^50s}".format("设置模型文件点位"))
        r = requests.get(url=url)
        print(f"上传模型点位请求响应:{r}。")
        r.raise_for_status()
        result      = r.json().get('result', {})
        device_data = result.get('device_data', {})
        # NOTE(review): default {} looks odd for an id — confirm backend schema.
        device_id   = result.get('device_id', {})

        # Build the point payload for every configured point.
        points = []
        for key, value in points_set_dict.items():
            points.append({
                "point_id"   : value['point_id'],
                "point_class": value['point_class'],
                "name"       : value['point_name'],
                "device_id"  : device_id
            })

        if device_data['point_list'] is None:
            device_data['point_list'] = points
        else:
            # Update point_id of entries matched by point_class; append the rest.
            for point in points:
                existing_point = next(
                    (p for p in device_data['point_list'] if p['point_class'] == point['point_class']), None)
                if existing_point:
                    existing_point['point_id'] = point['point_id']
                    print(f"点位 {point['point_class']} 已存在,更新 point_id 为 {point['point_id']}。")
                else:
                    device_data['point_list'].append(point)
                    print(f"添加新点位 {point}。")

        update_r = requests.post(update_url, json=result)  # upload the mutated result back

        if update_r.status_code == 200:
            print(f'模型点位保存成功!')
        else:
            print(f'保存模型点位时出错:', update_r.status_code)

    except Exception as e:
        print(f"请求错误:{e}")
        result = {}  # NOTE(review): dead store — `result` is unused after this

    return

+ 295 - 0
_opt/algorithm/_utils/config_info.py

@@ -0,0 +1,295 @@
+from typing import Union
+import pandas as pd
+
class ConfigInfo:
    """Helper around a component config dict.

    The config contains the port lists ``_PORTS_IN``/``_PORTS_OUT`` (each
    entry carrying a ``point_id``) and their group definitions
    ``_PORTS_IN_GROUP``/``_PORTS_OUT_GROUP`` (each with ``name``, ``start``,
    ``end`` slice indices into the port list).  This class resolves point
    ids and groups and splits/renames the data flowing through the ports.
    """

    def __init__(self, config) -> None:
        self.config   = config
        self.n_input  = len(self.get_io_id('in'))
        self.n_output = len(self.get_io_id('out'))

    def get_io_id(self, io='in') -> list:
        """Return the ordered point_ids of the 'in' or 'out' ports."""
        if io == 'in':
            io_key = '_PORTS_IN'
        elif io == 'out':
            io_key = '_PORTS_OUT'
        else:
            raise Exception('WRONG io')
        return [point['point_id'] for point in self.config[io_key]]

    def get_io_group_info(self, io='in', type: str = 'data', data: list = None) -> dict:
        """Map each port-group name to its slice of `data` (type='data') or
        of the port point_ids (type='point_id').

        Raises
        ------
        Exception
            On unknown `io`/`type`, or when type='data' without `data`.
        """
        if io == 'in':
            io_key = '_PORTS_IN_GROUP'
        elif io == 'out':
            io_key = '_PORTS_OUT_GROUP'
        else:
            # BUG FIX: an unknown `io` previously fell through and raised an
            # opaque NameError on io_key below.
            raise Exception('WRONG io')

        group_info = {}
        for group in self.config[io_key]:
            name      = group['name']
            start_idx = group['start']
            end_idx   = group['end']

            if type == 'data':
                # group_info = {'G1':[DF1,DF2], 'G2':[DF3,DF4]}
                if data is None:
                    raise Exception('当type为data时,必须输入data参数')
                info = data[start_idx:end_idx]
            elif type == 'point_id':
                # group_info = {'G1':[point_1,point_2],'G2':[point_3,point_4]}
                info = self.get_io_id(io=io)[start_idx:end_idx]
            else:
                raise Exception('WRONG type')

            group_info[name] = info

        return group_info

    def get_data_by_group_and_name(self, data: list, name: str, group: str = None,
                                   allow_group_missing=False) -> list:
        """Collect the input item with point_id `name` from each requested group.

        Parameters
        ----------
        data : list
            Input data ordered like _PORTS_IN.
        name : str
            point_id to pick inside each group.
        group : None | str | list
            None -> all groups; otherwise the group name(s) to read.
        allow_group_missing : bool
            Skip missing groups instead of raising.
        """
        group_data = self.get_io_group_info(io='in', type='data', data=data)
        group_name = self.get_io_group_info(io='in', type='point_id')

        if group is None:
            group = list(group_name.keys())
        elif isinstance(group, str):
            group = [group]
        elif not isinstance(group, list):
            raise Exception('WRONG')

        all_data = []
        for each_group in group:
            if each_group not in group_name:
                if not allow_group_missing:
                    raise Exception(f'缺失{each_group}')
                continue
            data_idx = group_name[each_group].index(name)
            data_i   = group_data[each_group][data_idx]
            # Normalise DataFrames to their first column, named after the group.
            if isinstance(data_i, pd.DataFrame):
                data_i = data_i.iloc[:, [0]].set_axis([each_group], axis=1)
            all_data.append(data_i)

        return all_data

    def get_io_info_by_pc(self, io='in', type: str = 'data', data: list = None,
                          drop_groups: list = None) -> dict:
        """Regroup ports by point position across groups.

        For each position in the (identical) point_id lists of the non-dropped
        groups, collect that position's data (type='data') or the group names
        (type='point_id'), keyed by the position's point_id.
        """
        if io == 'in':
            io_key = '_PORTS_IN_GROUP'
        elif io == 'out':
            io_key = '_PORTS_OUT_GROUP'
        else:
            raise Exception('WRONG io')

        # BUG FIX: passing drop_groups=None previously raised a TypeError on
        # the membership test below; treat None the same as "drop nothing".
        # (The old mutable default `=[]` is also gone.)
        if drop_groups is None:
            drop_groups = []
        elif not isinstance(drop_groups, list):
            raise Exception('WRONG drop_groups')

        group_pc_info_all = self.get_io_group_info(io=io, type='point_id', data=data)
        group_pc_info = {k: v for k, v in group_pc_info_all.items() if k not in drop_groups}

        first_ids = list(group_pc_info.values())[0]
        if not all(ids == first_ids for ids in group_pc_info.values()):
            raise Exception('请确保所有分组中点位的编号是一致的!')

        pc_info = {}
        for count in range(len(first_ids)):
            info_list = []
            for group_info in self.config[io_key]:
                if group_info['name'] in drop_groups:
                    continue
                idx      = group_info['start'] + count
                point_id = self.get_io_id(io=io)[idx]
                if type == 'data':
                    info_list.append(data[idx])
                elif type == 'point_id':
                    info_list.append(group_info['name'])
            pc_info[point_id] = info_list

        return pc_info

    def rename_df(self, dfs: list, io='in') -> list:
        """Rename each DataFrame's first column to the matching port point_id;
        non-DataFrame items (or non-str ids) pass through unchanged."""
        point_id = self.get_io_id(io=io)
        if len(dfs) != len(point_id):
            raise Exception(f'数据长度有误,point_id:{point_id},dfs:{dfs}')

        result = []
        for p, df in zip(point_id, dfs):
            if isinstance(df, pd.DataFrame) and isinstance(p, str):
                result.append(df.iloc[:, [0]].set_axis([p], axis=1))
            else:
                result.append(df)
        return result

    def split_df_by_groupinfo(
        self,
        data_map: dict,
        allow_data_map_is_subset: bool = False,
        allow_data_map_miss_group: list = None
    ) -> list:
        """Flatten grouped output data into the ordered output-port list.

        Parameters
        ----------
        data_map : dict
            Group name -> data: a DataFrame (columns are point ids) or a
            dict (point id -> value), e.g. {'G1': DF, 'G2': {'A': 1.0}}.
        allow_data_map_is_subset : bool
            Silently skip groups missing from data_map when True.
        allow_data_map_miss_group : list
            Specific group names allowed to be missing from data_map.

        Returns
        -------
        list
            Data items ordered like the configured output ports.

        Raises
        ------
        Exception
            Unknown group name, unknown point (pin) name, or an unsupported
            data type for a group.
        """
        output_groupinfo = self.get_io_group_info('out', type='point_id')
        split_data = []

        for group_name, points in output_groupinfo.items():
            data = data_map.get(group_name)

            if data is None:
                if isinstance(allow_data_map_miss_group, list) and group_name in allow_data_map_miss_group:
                    continue
                if allow_data_map_is_subset:
                    continue
                raise Exception(f'组件输出的分组名称{group_name}有误,分组:{list(data_map.keys())}')

            for p in points:
                if isinstance(data, pd.DataFrame):
                    if p not in data.columns:
                        raise Exception(f'组件输出的桩名称有误,未找到{p},桩:{data.columns.to_list()}')
                    p_data = data.loc[:, [p]]
                elif isinstance(data, dict):
                    if p not in data.keys():
                        raise Exception(f'组件输出的桩名称有误,未找到{p},桩:{list(data.keys())}')
                    p_data = data[p]
                else:
                    # BUG FIX: an unsupported type previously fell through and
                    # raised an opaque NameError on p_data.
                    raise Exception(f'组件输出的分组{group_name}数据类型有误:{type(data)}')
                split_data.append(p_data)

        return split_data

    def split_df(self, method: str, df: pd.DataFrame, by_group=None) -> list:
        """Split `df` into single-column DataFrames for the output ports.

        method='idx' slices by column position (column count must match);
        method='id' selects columns by output point_id.  When `by_group` is
        given, only that output group's ports are used.
        """
        if by_group is not None:
            output_point_id = self.get_io_group_info(io='out', type='point_id')[by_group]
            output_n = len(output_point_id)
        else:
            output_point_id = self.get_io_id('out')
            output_n = self.n_output

        df_list = []
        if method == 'idx':
            if df.shape[1] != output_n:
                raise Exception(f'输出数据的个数不等于原数据中的列数,以下是原数据中包含的列:{df.columns.to_list()}')
            for idx in range(output_n):
                df_list.append(df.iloc[:, [idx]])
        elif method == 'id':
            for point_id in output_point_id:
                if point_id not in df.columns:
                    raise Exception(f'数据中没有{point_id},以下是数据中包含的列:{df.columns.to_list()}')
                df_list.append(df.loc[:, [point_id]])
        else:
            # BUG FIX: an unknown method previously returned [] silently.
            raise Exception(f'WRONG method:{method}')

        return df_list

    def get_property(self, key: str, default=None):
        """Return config[key], or `default` when the key is absent, None or ''."""
        if key not in self.config.keys():
            return default
        value = self.config[key]  # renamed: no longer shadows builtin `property`
        if value is None or value == '':
            return default
        return value

    def check_property_exist(self, property_name: dict):
        """Ensure every key of `property_name` exists in the config.

        `property_name` maps config key -> human-readable name used in the
        error message.
        """
        for param, param_name in property_name.items():
            # Replaces the old bare try/except around a dict lookup.
            if param not in self.config:
                raise Exception(f'组件缺少自定义参数:{param_name}')

    def check_io_equal(self):
        """True when input and output point_ids are identical (same order)."""
        return self.get_io_id('in') == self.get_io_id('out')
+    
+    
if __name__ == '__main__':
    # --- Example 1: split grouped output data back into port order. -------
    config = {
        '_PORTS_IN': [],
        '_PORTS_OUT':
            [
                {'cols': [{'type': 'date', 'title': '时间'}, {'type': 'float', 'title': ''}], 'name': '','type': 'DF','static': True, 'point_id': 'a'},
                {'cols': [{'type': 'date', 'title': '时间'}, {'type': 'float', 'title': 'P_ND2_Tdb'}], 'name': 'ND2_室外温度', 'type': 'DF', 'static': True, 'point_id': 'b'},
                {'cols': [{'type': 'date', 'title': '时间'}, {'type': 'float', 'title': ''}], 'name': '','point_id': 'c'},
                {'cols': [{'type': 'date', 'title': '时间'}, {'type': 'float', 'title': ''}], 'name': '', 'point_id': 'd'}
            ],
        '_PORTS_IN_GROUP': [],
        '_PORTS_OUT_GROUP':
            [
                {'id': '1696909849014', 'end': 2, 'name': 'X', 'start': 0, 'static': True},
                {'id': '1696909849434', 'end': 4, 'name': 'Y', 'start': 2, 'static': True}
            ],
            '_CODE'       : None,
            '_DEVICE_CODE': None
    }
    config_info = ConfigInfo(config)

    df1 = pd.DataFrame({'a':[1,2,3],'b':[4,5,6]})
    df2 = pd.DataFrame({'c':[7,8,9],'d':[10,11,12]})

    # BUG FIX: the parameter is `data_map` — the old keyword `df_map` raised
    # TypeError, so this example never ran.
    res1 = config_info.split_df_by_groupinfo(data_map={'X':df1,'Y':df2})
    print('res1',res1)

    # --- Example 2: rename input DataFrames to their port point_ids. ------
    config = {
        '_PORTS_OUT': [],
        '_PORTS_IN':
            [
                {'point_id': 'a'},
                {'point_id': 'b'},
            ],
        '_PORTS_IN_GROUP': [],
        '_PORTS_OUT_GROUP':
            [
            ],
            '_CODE'       : None,
            '_DEVICE_CODE': None
    }
    df1 = pd.DataFrame({'x':[1,2,3]})
    df2 = pd.DataFrame({'y':[1,2,3]})
    config_info = ConfigInfo(config)
    res2 = config_info.rename_df([df1,df2])
    print('res2',res2)

+ 287 - 0
_opt/algorithm/_utils/data_cleaner.py

@@ -0,0 +1,287 @@
+import warnings
+from typing import Union
+from datetime import datetime
+
+import numpy as np
+import pandas as pd 
+from statsmodels.formula.api import rlm
+from scipy.stats import iqr
+
+from .data_summary import summary_dataframe
+
class DataCleaner:
    """Chainable row-filter for a pandas DataFrame.

    Each ``rm_*`` method computes a boolean removal mask, OR-s it into the
    shared ``self.drop_index`` and returns ``self`` so that filters can be
    chained.  No row is physically removed until :meth:`get_data` is
    called, which applies the accumulated mask to the untouched
    ``raw_data``.
    """

    def __init__(self,data:pd.DataFrame,print_process=True) -> None:
        """
        :param data: input DataFrame; kept untouched in ``raw_data``, a
            working copy is used for all rule evaluation
        :param print_process: if True, print a statistics summary of the
            input data now and of the result data in :meth:`get_data`,
            plus a per-filter removal report
        """
        self.raw_data      = data
        self.data          = data.copy()
        # Accumulated removal mask, one flag per row of raw_data.
        self.drop_index    = np.array([False]*len(self.raw_data))
        self.print_process = print_process

        if self.print_process:
            summary_dataframe(df=self.raw_data,df_name='原始数据')

    def rm_na_and_inf(self):
        """Mark rows containing any NaN or +/-inf value for removal."""
        is_na_data      = self.data.isna().any(axis=1).values
        is_inf_data     = np.any(np.isinf(self.data.values),axis=1)
        drop_index      = is_na_data | is_inf_data
        self.drop_index = self.drop_index | drop_index
        self._count_removed_data(index=drop_index,method='rm_na_and_inf')
        return self

    def rm_constant(
        self,
        window        :int  = 10,
        exclude_value :list = None,
        include_cols  :list = '__ALL__',
        include_by_re :bool = False,
        exclude_cols  :list = None
    ):
        """Mark rows where a column stayed constant over a rolling window.

        A row is flagged when the rolling standard deviation (over the last
        ``window`` samples) of any selected column is exactly zero, except
        where the current value is listed in ``exclude_value``.

        :param window: rolling window length in samples
        :param exclude_value: values that are allowed to stay constant
        :param include_cols: columns to check ('__ALL__' = every column)
        :param include_by_re: treat ``include_cols`` as a regex pattern
        :param exclude_cols: columns to leave out of the check
        """
        data              = self._get_data_by_cols(include_cols,include_by_re,exclude_cols)
        drop_index_matrix = (data.rolling(window=window).std()==0)
        if exclude_value is not None:
            for each_value in exclude_value:
                # Cells equal to an excluded value are never flagged.
                keep_index_matrix = data.values == each_value
                drop_index_matrix[keep_index_matrix] = False
        drop_index        = drop_index_matrix.any(axis=1)
        self.drop_index   = self.drop_index | drop_index
        self._count_removed_data(index=drop_index,method='rm_constant',index_matrix=drop_index_matrix,var_name=data.columns)
        return self

    def rm_rolling_fluct(
        self,
        window        :int             = 10,
        unit          :Union[str,None] = 'min',
        fun           :str             = 'ptp',
        thre          :float           = 0,
        include_cols  :list            = '__ALL__',
        include_by_re :bool            = False,
        exclude_cols  :list            = None
    ):
        """Mark rows inside a rolling window whose fluctuation exceeds ``thre``.

        Uses a centered rolling window; when ``unit`` is given the window is
        time-based (e.g. '10min', requires a datetime index), otherwise it
        is sample-based.

        :param fun: 'ptp' = max - min, 'pct' = (max - min) / min
        :param thre: threshold above which rows are flagged
        """
        data = self._get_data_by_cols(include_cols,include_by_re,exclude_cols)

        if unit is None:
            roll_window = window
        else:
            roll_window = str(window) + unit
        roll_data = data.rolling(window=roll_window,min_periods=1,center=True)

        if fun == 'ptp':
            res = roll_data.max() - roll_data.min()
        elif fun == 'pct':
            res = (roll_data.max() - roll_data.min())/roll_data.min()
        # NOTE(review): any other `fun` leaves `res` unbound -> NameError.
        drop_index_matrix = res>thre
        drop_index = drop_index_matrix.any(axis=1)
        self.drop_index = self.drop_index | drop_index
        self._count_removed_data(index=drop_index,method='rm_rolling_fluct',index_matrix=drop_index_matrix,var_name=data.columns)
        return self

    def rm_outlier_rolling_mean(
        self,
        window       :int    = 10,
        thre         :float  = 0.02,
        include_cols :list   = '__ALL__',
        include_by_re:bool   = False,
        exclude_cols :list   = None
    ):
        """Mark rows deviating from their trailing rolling mean.

        A row is flagged when |value - rolling_mean| / value of any selected
        column exceeds ``thre`` (relative deviation).
        """
        data            = self._get_data_by_cols(include_cols,include_by_re,exclude_cols)
        data            = data.reset_index(drop=True)
        windows_mean    = data.rolling(window=window,min_periods=1).mean()
        drop_index      = (((data - windows_mean)/data).abs()>thre).any(axis=1).values
        self.drop_index = drop_index | self.drop_index
        self._count_removed_data(index=drop_index,method='rm_outlier_mean')
        return self

    def rm_diff(
        self,
        thre         : float,
        shift        : int    = 1,
        include_cols : list   = '__ALL__',
        include_by_re: bool   = False,
        exclude_cols : list   = None
    ):
        """Mark rows whose step change exceeds ``thre`` in absolute value.

        With ``shift`` == 1 the difference is current value minus previous
        value (row-wise ``DataFrame.diff``).
        """
        data              = self._get_data_by_cols(include_cols,include_by_re,exclude_cols)
        data_diff         = data.diff(periods=shift,axis=0)
        drop_index_matrix = data_diff.abs() > thre
        drop_index        = drop_index_matrix.any(axis=1).values
        self.drop_index   = drop_index | self.drop_index
        self._count_removed_data(index=drop_index,method='rm_diff',index_matrix=drop_index_matrix,var_name=data.columns)
        return self

    def rm_zero(
        self,
        include_cols :list  = '__ALL__',
        include_by_re:bool  = False,
        exclude_cols :list  = None
    ):
        """Mark rows where any selected column equals zero."""
        data            = self._get_data_by_cols(include_cols,include_by_re,exclude_cols)
        drop_index      = (data==0).any(axis=1).values
        self.drop_index = drop_index | self.drop_index
        self._count_removed_data(index=drop_index,method='rm_zero')
        return self


    def rm_negative(
        self,
        keep_zero     :bool = False,
        include_cols :list  = '__ALL__',
        include_by_re:bool  = False,
        exclude_cols :list  = None
    ):
        """Mark rows with negative (or, unless ``keep_zero``, non-positive) values.

        :param keep_zero: if True only values < 0 are flagged; if False
            values <= 0 are flagged
        """
        data = self._get_data_by_cols(include_cols,include_by_re,exclude_cols)
        if keep_zero is True:
            drop_index = (data<0).any(axis=1).values
        else:
            drop_index = (data<=0).any(axis=1).values
        self.drop_index = drop_index | self.drop_index
        self._count_removed_data(index=drop_index,method='rm_negative')
        return self

    def rm_rule(self,remove_rule:str):
        """Mark rows matching a ``DataFrame.eval`` boolean expression.

        :param remove_rule: expression over column names, e.g. ``"a > 2"``;
            rows where it evaluates True are removed
        """
        data            = self.data.copy()
        drop_index      = np.array(data.eval(remove_rule))
        self.drop_index = drop_index | self.drop_index
        self._count_removed_data(index=drop_index,method=f'rm_rule({remove_rule})')
        return self

    def rm_regression_outlier(
        self,
        formula     : str,
        rm_resid_IQR: float                = 1.5,
        exclude_rule: Union[str,list,None] = None,
        min_sample  : int                  = 30,
    ):
        """Mark regression outliers via a robust linear model (statsmodels ``rlm``).

        Fits ``formula`` on the rows that survived all previous filters and
        are not matched by ``exclude_rule``, then flags rows whose residual
        lies outside [Q1 - k*IQR, Q3 + k*IQR] with k = ``rm_resid_IQR``.
        If fewer than ``min_sample`` rows remain, nothing is changed.

        :param formula: patsy-style model formula, e.g. ``"y ~ x"``
        :param rm_resid_IQR: IQR multiplier for the residual fences
        :param exclude_rule: eval expression(s) selecting rows to keep out
            of the fit (their current drop flags are preserved)
        :param min_sample: minimum number of rows required to fit the model
        """
        #! order-sensitive: operates only on rows not yet dropped, so it
        #! must run after the filters whose result it should respect.
        RAW_INDEX    = np.arange(len(self.data))

        # Rows matched by exclude_rule take no part in the regression.
        if exclude_rule is None:
            exclude_rule = []
        if isinstance(exclude_rule,str):
            exclude_rule = [exclude_rule]
        exclued_index = np.array([False]*len(self.raw_data))
        for rule in exclude_rule:
            exclued_index = exclued_index | np.array(self.data.eval(rule))
        exclued_index      = pd.Series(data=exclued_index,index=RAW_INDEX)
        # Preserve the pre-existing drop flags of the excluded rows.
        exclude_index_drop = pd.Series(self.drop_index,index=RAW_INDEX).loc[exclued_index.values]

        # Rows still to be cleaned: not dropped so far and not excluded.
        data_clean   = self.data.assign(RAW_INDEX_=RAW_INDEX).loc[~(self.drop_index|exclued_index.values)]
        filter_index = data_clean.RAW_INDEX_.values

        if len(data_clean) < min_sample:
            return self

        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            mod = rlm(formula,data=data_clean).fit(maxiter=500)
        resid      = np.array(mod.resid)
        IQR        = iqr(resid)
        drop_index = (resid < (np.quantile(resid,q=0.25)-rm_resid_IQR*IQR)) | (resid > (np.quantile(resid,q=0.75)+rm_resid_IQR*IQR))

        # Map the mask on the fitted subset back onto the full row range,
        # filling unfitted rows with False (excluded rows keep old flags).
        drop_index_incomplete = pd.Series(data=drop_index,index=filter_index).combine_first(exclude_index_drop)
        drop_index_complete   = drop_index_incomplete.reindex(RAW_INDEX).fillna(False).values
        self.drop_index       = drop_index_complete | self.drop_index
        # NOTE: the printed count/percentage refers to the fitted subset,
        # not to the full raw data.
        self._count_removed_data(index=drop_index,method=f'rm_reg({formula})')
        return self

    def rm_date_range(self,start:datetime,end:datetime,col=None):
        """Mark rows whose timestamp falls inside [start, end] (inclusive).

        :param col: column holding the timestamps; if None the index of
            ``raw_data`` is used
        """
        start = pd.Timestamp(start)
        end   = pd.Timestamp(end)
        if col is None:
            ts = pd.to_datetime(self.raw_data.index)
        else:
            ts = pd.to_datetime(self.raw_data.loc[:,col])
        drop_index = (ts>=start) & (ts<=end)
        self.drop_index = drop_index | self.drop_index
        self._count_removed_data(index=drop_index,method=f'rm_date_range({start}~{end})')
        return self

    def rm_outrange(
        self,
        method        :str   = 'quantile',
        upper         :float = 0.99,
        lower         :float = 0.01,
        include_cols  :list  = '__ALL__',
        include_by_re :bool  = False,
        exclude_cols  :list  = None
    ):
        """Mark rows with values outside per-column bounds.

        :param method: 'quantile' - bounds are the per-column quantiles at
            ``upper`` / ``lower``; 'raw' - bounds are the literal values
        :raises Exception: on any other ``method``
        """
        data = self._get_data_by_cols(include_cols,include_by_re,exclude_cols)
        if method == 'quantile':
            q_upper = np.quantile(data.values,q=upper,axis=0)
            q_lower = np.quantile(data.values,q=lower,axis=0)
        elif method == 'raw':
            q_upper = upper
            q_lower = lower
        else:
            raise Exception('WRONG method')

        drop_index_matrix = (data > q_upper) | (data < q_lower)
        drop_index        = drop_index_matrix.any(axis=1)
        self.drop_index   = self.drop_index | drop_index
        self._count_removed_data(index=drop_index,method='rm_outrange',index_matrix=drop_index_matrix,var_name=data.columns)

        return self


    def get_data(self,fill=None,get_drop=False) -> pd.DataFrame:
        """Apply the accumulated mask to ``raw_data`` and return the result.

        :param fill: if None, dropped rows are removed; otherwise all rows
            are kept and the flagged rows are overwritten with ``fill``
        :param get_drop: if True, invert the mask (return/keep the rows
            that were marked for removal instead)
        """
        index = self.drop_index if not get_drop else ~self.drop_index
        if fill is None:
            # Keep only the rows not marked for removal.
            result_data = self.raw_data.loc[~index,:]
        else:
            # Keep every row; overwrite the flagged rows with `fill`.
            result_data = self.raw_data.copy()
            result_data.loc[index,:] = fill
        if self.print_process:
            summary_dataframe(result_data,df_name='结果数据')
        return result_data

    def _get_data_by_cols(
        self,
        include_cols :list = '__ALL__',
        include_by_re:bool = False,
        exclude_cols :list = None,
    ) -> pd.DataFrame:
        """Return a copy of the working data restricted to selected columns.

        Selection: with ``include_by_re`` True, ``include_cols`` must be a
        regex string matched against column names (NOTE(review): the
        default '__ALL__' would then be used as a literal pattern);
        otherwise '__ALL__' means every column, a string means that single
        column, a list means those columns.  ``exclude_cols`` is removed
        from the selection afterwards.
        """
        data = self.data.copy()

        if include_by_re is True:
            if isinstance(include_cols,str):
                cols = data.loc[:,data.columns.str.contains(include_cols,regex=True)].columns
            else:
                raise Exception('WRONG')

        elif include_by_re is False:
            if include_cols == '__ALL__':
                cols = data.columns
            elif isinstance(include_cols,str):
                cols = [include_cols]
            elif isinstance(include_cols,list):
                cols = data.loc[:,include_cols].columns
            else:
                raise Exception('WRONG')
        # NOTE(review): a non-bool include_by_re leaves `cols` unbound.

        if exclude_cols is not None:
            cols = cols.difference(other=exclude_cols)

        return data.loc[:,cols]


    def _count_removed_data(self,index,method,index_matrix=None,var_name=None):
        """Print how many rows ``method`` flagged, overall and per column.

        :param index: boolean row mask produced by the calling filter
        :param method: label used in the printed report
        :param index_matrix: optional per-cell mask for per-column counts
        :param var_name: column names matching ``index_matrix``'s columns
        """
        count = index.sum()
        pct   = round(count / len(index) * 100,2)
        if self.print_process:
            print(f'remove {count}({pct}%) by {method}')

        if index_matrix is not None and var_name is not None:
            var_drop_count = np.sum(index_matrix,axis=0)
            for var,drop_count in zip(var_name,var_drop_count):
                if drop_count == 0:
                    continue
                if self.print_process:
                    print(f'{var}:{drop_count}')

+ 671 - 0
_opt/algorithm/_utils/data_service.py

@@ -0,0 +1,671 @@
+import json
+import datetime
+import time
+import pandas as pd
+import traceback
+import sys
+import os
+from functools import partial
+from dateutil import tz
+
+try:
+    from workflowlib import requests
+except:
+    import requests
+
+
# Relative endpoint paths of the data service; the reader/writer classes
# join them onto the base urls taken from the environment.
urlcfg = dict(
    getpointdata_url="data/getpointdata",
    getpointsdata_url="data/getpointsdata",
    getpointsdataforai_url="data/getpointsdataforai",
    getpointsruntime_url="data/getpointsruntime",
    getcurrdata_url="ai/getcurrdata",
    gethisdata_url="ai/gethisdata",
    putaidata_url="ai/putaidata",
    uploadaifile_url="ai/uploadaifile",
    addpointdatum_url="ai/addpointdatum",
)
+
+
class PointReader:
    """Reads point (sensor) data from the data service over HTTP.

    Every request is retried up to ``max_try`` times, sleeping
    ``post_sleep`` seconds between failed attempts.
    """

    # Base urls come from the environment; endpoint paths from `urlcfg`.
    root_url               = os.environ.get('DATA_ROOT_URL')
    upload_url             = os.environ.get('DATA_UPLOAD_URL')
    getpointdata_url       = urlcfg["getpointdata_url"]
    getpointsdata_url      = urlcfg["getpointsdata_url"]
    getpointsdataforai_url = urlcfg["getpointsdataforai_url"]
    getpointsruntime_url   = urlcfg["getpointsruntime_url"]
    getcurrdata_url        = urlcfg["getcurrdata_url"]
    gethisdata_url         = urlcfg["gethisdata_url"]
    putaidata_url          = urlcfg["putaidata_url"]
    uploadaifile_url       = urlcfg["uploadaifile_url"]
    addpointdatum_url      = urlcfg["addpointdatum_url"]

    # Converts a unix timestamp to a tz-aware datetime (Asia/Shanghai).
    dtfromts = partial(datetime.datetime.fromtimestamp, tz=tz.gettz('Asia/Shanghai'))
    # Max connection retries; seconds to sleep after a failed attempt.
    max_try    = 10
    post_sleep = 1

    def __init__(self,url=None) -> None:
        """Resolve the query endpoint.

        :param url: optional explicit endpoint overriding the environment
            based DATA_ROOT_URL + getpointsdata_url combination
        :raises Exception: if no url is given and DATA_ROOT_URL is unset
        """
        if url is not None:
            self.url = url
            print(f'使用临时url:{self.url}')
        elif self.root_url is None:
            raise Exception('未在环境变量中获取到 DATA_ROOT_URL')
        else:
            self.url = self.root_url + self.getpointsdata_url

    # Fetch data for several point ids in one request.
    def get_points_data(self,point_ids, from_time, to_time, interval=1, type_=3, ts2dt_col=None, return_type='dict'):
        """
        :param point_ids: list of point ids
        :param from_time: datetime, start of the query range
        :param to_time: datetime, end of the query range
        :param interval: int=1, sampling interval
        :param type_: =3, required by the backend API
        :param ts2dt_col: list of timestamp columns to convert to datetime
        :param return_type: str in {'dict', 'df', 'dfcol'}, default 'dict'
        'dict' returns {point_id: DataFrame} (raw structure)
        'df' adds a point_id column per frame and concatenates by rows
        'dfcol' concatenates by columns, indexed by timestamp, using the
        point_id as the value column name (only works when 'value' exists)
        :return: see return_type; None if every retry failed
        """
        post_data = {
            "point_ids": point_ids,
            "begin"    : round(from_time.timestamp()),
            "end"      : round(to_time.timestamp()),
            "interval" : interval,
            "type"     : type_,
        }
        rem_try = self.max_try
        while rem_try > 0:
            try:
                resp = requests.post(url=self.url, data=json.dumps(post_data),timeout=60)
                data = resp.json()['data']
                if data:
                    res = dict()
                    for point in data:
                        res[point['point_id']] = pd.DataFrame(point['data'])
                        if ts2dt_col is not None:
                            res[point['point_id']] = self.ts2dt(res[point['point_id']], ts2dt_col)
                        # res[point['point_id']].set_index(['ts'], inplace=True)
                    if return_type == 'dict':
                        return res
                    elif return_type == 'df':
                        for point in res.keys():
                            res[point]['point_id'] = point
                        return pd.concat(res.values(), axis=0)
                    elif return_type == 'dfcol':
                        res_df = pd.DataFrame()
                        for point_id, df_ in res.items():
                            res_df = pd.concat(
                                [res_df, df_.set_index('ts').rename(columns={'value': point_id})],
                                axis=1)
                        return res_df.reset_index()
                else:
                    # Empty payload counts as a failed attempt.
                    rem_try -= 1
                    time.sleep(self.post_sleep)
            except Exception as e:
                self.error_print(sys._getframe().f_code.co_name)
                rem_try -= 1
                time.sleep(self.post_sleep)
        if rem_try == 0:
            print("\nget_points_data failed")

    # Internal: print details of the current exception.
    def error_print(self,func_name):
        print()
        print(f"{self.dtfromts(time.time())}:")
        print(f"function {func_name} error!")
        print(f"Exception Info:")
        e_type, e_value, e_traceback = sys.exc_info()
        print(e_type)
        print(e_value)
        traceback.print_tb(e_traceback)
        print()

    # Internal: convert timestamp columns of a DataFrame to datetime.
    def ts2dt(self,df: pd.DataFrame, cols):
        for col in cols:
            df[col] = pd.Series(map(self.dtfromts, df[col]))
        return df
+
class PointWriter:
    """Uploads point data to the point database through the data service.

    Mirrors :class:`PointReader`'s retry behaviour: each request is retried
    up to ``max_try`` times, sleeping ``post_sleep`` seconds in between.
    """

    upload_url        = os.environ.get('DATA_UPLOAD_URL')
    addpointdatum_url = urlcfg.get('addpointdatum_url')
    # BUGFIX: this attribute was missing although error_print() reads
    # self.dtfromts, so any error path raised AttributeError; kept
    # consistent with PointReader.
    dtfromts = partial(datetime.datetime.fromtimestamp, tz=tz.gettz('Asia/Shanghai'))
    # Max connection retries; seconds to sleep after a failed attempt.
    max_try    = 10
    post_sleep = 1

    def __init__(self,url=None) -> None:
        """Resolve the upload endpoint.

        :param url: optional explicit endpoint overriding the environment
            based DATA_UPLOAD_URL + addpointdatum_url combination
        :raises Exception: if no url is given and DATA_UPLOAD_URL is unset
        """
        if url is not None:
            self.url = url
            print(f'使用临时url:{self.url}')
        elif self.upload_url is None:
            raise Exception('未在环境变量中获取到 DATA_UPLOAD_URL')
        else:
            self.url = self.upload_url + self.addpointdatum_url

    # Upload a single datum for one point to the point database.
    def ai_add_point_data(self, point_id, ts, value):
        """
        :param point_id: str, the point id (must already exist in the DB)
        :param ts: datetime, business timestamp of the value
        :param value: value to store (sent as str)
        :return: dict, response of the data service (dict['state'] == 0 on
            success), or None if every retry failed
        """
        # BUGFIX: use the endpoint resolved in __init__ so a temporary url
        # passed to the constructor is honoured; the old code recomputed
        # upload_url + addpointdatum_url, which ignored it and raised
        # TypeError when DATA_UPLOAD_URL was unset.
        url = self.url
        post_data = [
            {
                "point_id": str(point_id),
                "data": [{"ts": int(ts.timestamp()), "value": str(value)}]
            }
        ]

        rem_try = self.max_try
        while rem_try > 0:
            try:
                resp = requests.post(
                    url=url,
                    headers={'Content-Type': 'application/json; charset=UTF-8'},
                    data=json.dumps(post_data),
                    timeout=60
                )
                try:
                    state = resp.json()['state']
                    if state == 0:
                        print(f"\nput {point_id} data success!")
                        return resp.json()
                    # Unexpected payload: log it and retry.
                    print(f"strange resp: {resp.json()}")
                    rem_try -= 1
                    time.sleep(self.post_sleep)
                finally:
                    # BUGFIX: the old code skipped resp.close() on the
                    # success path (close followed the return); always
                    # release the connection.
                    resp.close()
            except Exception:
                self.error_print(sys._getframe().f_code.co_name)
                rem_try -= 1
                time.sleep(self.post_sleep)
        if rem_try == 0:
            print("\nai_add_point_data failed")
            return None

    # Internal: print details of the current exception.
    def error_print(self,func_name):
        print()
        print(f"{self.dtfromts(time.time())}:")
        print(f"function {func_name} error!")
        print(f"Exception Info:")
        e_type, e_value, e_traceback = sys.exc_info()
        print(e_type)
        print(e_value)
        traceback.print_tb(e_traceback)
        print()
+        
+
+       
+# # 内部函数:将datetime转换成timestamp
+# def dt2ts(df: pd.DataFrame, cols):
+#     for col in cols:
+#         df[col] = pd.Series([dt.timestamp() for dt in df[col]])
+#     return df
+
+
+
+
+
+# # 传入一个point id,返回一个dataframe里面包含了ts,point_id,value
+# def get_point_data(point_id, from_time, to_time, interval=1, type_=3, ts2dt_col=None):
+#     """
+#     :param point_id: string
+#     :param from_time: datetime 开始时间
+#     :param to_time: datetime 结束时间
+#     :param interval: int=1 时间间隔
+#     :param type_: =3 后端业务要求
+#     :param ts2dt_col: list timestamp需要转换为datetime的列名
+#     :return: DataFrame 包含时间
+#     """
+#     url = root_url + getpointdata_url
+#     post_data = {
+#         "point_id": point_id,
+#         "begin": round(from_time.timestamp()),
+#         "end": round(to_time.timestamp()),
+#         "interval": interval,
+#         "type": type_,
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             ts_data = resp.json()['data']
+#             if ts_data:
+#                 res = pd.DataFrame(ts_data)
+#                 if ts2dt_col is not None:
+#                     res = ts2dt(res, ts2dt_col)
+#                 # res.set_index(['ts'], inplace=True)
+#                 return res
+#             else:
+#                 rem_try -= 1
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nget_point_data failed")
+
+
+
+# # 传入包含多个point id的list,返回一个dataframe里面包含了ts,多个point_id求总后的value
+# def sum_points_by_ts(point_ids, from_time, to_time, interval=1, type_=3, ts2dt_col=None):
+#     """
+#     :param point_ids: list
+#     :param from_time: datetime 开始时间
+#     :param to_time: datetime 结束时间
+#     :param interval: int=1 时间间隔
+#     :param type_: =3 后端业务要求
+#     :param ts2dt_col: list timestamp需要转换为datetime的列名
+#     :return: DataFrame
+#     """
+#     try:
+#         data_dict = get_points_data(point_ids, from_time, to_time, interval, type_, ts2dt_col)
+#         for point_id in data_dict.keys():
+#             data_dict[point_id].set_index(['ts'], inplace=True)
+#         return sum(_ for _ in data_dict.values()).reset_index()
+#     except Exception as e:
+#         error_print(sys._getframe().f_code.co_name)
+#         time.sleep(post_sleep)
+#         print("\nsum_points_by_ts failed")
+
+
+# # 传入包含多个point id的list,返回所有point_id最新的一条数据
+# def get_points_run_time(point_ids, ts2dt_col=['ts']):
+#     """
+#     :param point_ids: list
+#     :param ts2dt_col: list=['ts'] timestamp需要转换为datetime的列名
+#     :return: DataFrame
+#     """
+#     url = root_url + getpointsruntime_url
+#     post_data = {
+#         "point_ids": point_ids,
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             data = resp.json()['data']
+#             if data:
+#                 res = pd.DataFrame(data)
+#                 if ts2dt_col is not None:
+#                     res = ts2dt(res, ts2dt_col)
+#                 return res
+#             else:
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nget_points_run_time failed")
+
+
+# # 获取最新的AI数据
+# def ai_get_curr_data(
+#         model_id, model_version, algo, algo_version, module_id,
+#         ts2dt_col=None,
+# ):
+#     """
+#     :param model_id: str 模型编号
+#     :param model_version: str 模型版本
+#     :param algo: str 算法名称
+#     :param algo_version: str 算法版本
+#     :param module_id: str 模块编号
+#     :param ts2dt_col: list timestamp需要转换为datetime的列名
+#     :return: datetime, DataFrame 时间戳和数据
+#     """
+#     url = upload_url + getcurrdata_url
+#     post_data = {
+#         "model_id": model_id,
+#         "model_version": model_version,
+#         "algo": algo,
+#         "algo_version": algo_version,
+#         "module_id": module_id,
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             data_resp = resp.json()['data']
+#             if data_resp:
+#                 # res = dict()
+#                 # for row in data_resp:
+#                 #     res[row['ts']] = pd.DataFrame(json.loads(row['data']))
+#                 ts_dt = dtfromts(data_resp[0]['ts'])
+#                 res = pd.DataFrame(json.loads(data_resp[0]['data']))
+#                 if ts2dt_col is not None:
+#                     res = ts2dt(res, ts2dt_col)
+#                 return ts_dt, res
+#             else:
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_get_curr_data failed")
+
+
+# # 获取历史AI数据
+# def ai_get_his_data(
+#         model_id, model_version, algo, algo_version, module_id,
+#         from_time: datetime.datetime,
+#         to_time: datetime.datetime,
+#         ts2dt_col=None,
+#         return_type='df'
+# ):
+#     """
+#     :param model_id: str 模型编号
+#     :param model_version: str 模型版本
+#     :param algo: str 算法名称
+#     :param algo_version: str 算法版本
+#     :param module_id: str 模块编号
+#     :param from_time: datetime 开始时间
+#     :param to_time: datetime 结束时间
+#     :param ts2dt_col: list timestamp需要转换为datetime的列名
+#     :param return_type: str='df' in {'dict', 'df'} 指定返回的数据结构
+#     'dict' 返回 {ts: DataFrame} (原始结构)
+#     'df' 返回各 DataFrame 加入 ts 时间列再按行拼合后的结果
+#     :return: DataFrame里面包含了ts, value
+#     """
+#     url = upload_url + gethisdata_url
+#     post_data = {
+#         "model_id": model_id,
+#         "model_version": model_version,
+#         "algo": algo,
+#         "algo_version": algo_version,
+#         "module_id": module_id,
+#         "begin": round(from_time.timestamp()),
+#         "end": round(to_time.timestamp()),
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             data_resp = resp.json()['data']
+#             if data_resp:
+#                 res = dict()
+#                 for row in data_resp:
+#                     ts = dtfromts(row['ts'])
+#                     res[ts] = pd.DataFrame(json.loads(row['data']))
+#                     if ts2dt_col is not None:
+#                         res[ts] = ts2dt(res[ts], ts2dt_col)
+#                 if return_type == 'dict':
+#                     return res
+#                 elif return_type == 'df':
+#                     for ts in res.keys():
+#                         res[ts]['ts'] = ts
+#                     return pd.concat(res.values(), axis=0)
+#             else:
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_get_his_data failed")
+
+# def ai_get_his_data2(
+#         model_id, model_version, algo, algo_version, module_id,
+#         from_time: datetime.datetime,
+#         to_time: datetime.datetime,
+#         ts2dt_col=None,
+#         return_type='df'
+# ):
+#     """
+#     :param model_id: str 模型编号
+#     :param model_version: str 模型版本
+#     :param algo: str 算法名称
+#     :param algo_version: str 算法版本
+#     :param module_id: str 模块编号
+#     :param from_time: datetime 开始时间
+#     :param to_time: datetime 结束时间
+#     :param ts2dt_col: list timestamp需要转换为datetime的列名
+#     :param return_type: str='df' in {'dict', 'df'} 指定返回的数据结构
+#     'dict' 返回 {ts: DataFrame} (原始结构)
+#     'df' 返回各 DataFrame 加入 ts 时间列再按行拼合后的结果
+#     :return: DataFrame里面包含了ts, value
+#     """
+#     url = root_url + gethisdata_url
+#     post_data = {
+#         "model_id": model_id,
+#         "model_version": model_version,
+#         "algo": algo,
+#         "algo_version": algo_version,
+#         "module_id": module_id,
+#         "begin": round(from_time.timestamp()),
+#         "end": round(to_time.timestamp()),
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             data_resp = resp.json()['data']
+#             if data_resp:
+#                 res = dict()
+#                 for row in data_resp:
+#                     ts = dtfromts(row['ts'])
+#                     res[ts] = pd.DataFrame(json.loads(row['data']))
+#                     if ts2dt_col is not None:
+#                         res[ts] = ts2dt(res[ts], ts2dt_col)
+#                 if return_type == 'dict':
+#                     return res
+#                 elif return_type == 'df':
+#                     for ts in res.keys():
+#                         res[ts]['ts'] = ts
+#                     return pd.concat(res.values(), axis=0)
+#             else:
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_get_his_data failed")
+
+
+# # 将 DataFrame 格式数据转换为 jsonlike 的 list 格式数据
+# def df2jsonlike(df: pd.DataFrame):
+#     res = []
+#     for _, row in df.iterrows():
+#         res.append(row.to_dict())
+#     return res
+
+
+# # 将模型所预测的数据存入数据库
+# # 点位数据
+# def ai_put_ai_data(
+#         model_id, model_version, algo, algo_version, module_id,
+#         ts_dt: datetime.datetime,
+#         data_df: pd.DataFrame,
+#         dt2ts_col=None
+# ):
+#     """
+#     :param model_id: str 模型编号
+#     :param model_version: str 模型版本
+#     :param algo: str 算法名称
+#     :param algo_version: str 算法版本
+#     :param module_id: str 模块编号
+#     :param ts_dt: datetime 业务发生时间,取用时的唯一可用时间戳
+#     :param data_df: DataFrame 业务数据
+#     :param dt2ts_col: list 业务数据中 datetime 列转换为 timestamp
+#     :return: dict: 数据服务返回值,可用 dict['state'] == 0 判断存数据是否成功
+#     """
+#     url = upload_url + putaidata_url
+#     if dt2ts_col is not None:
+#         data_df = dt2ts(data_df, dt2ts_col)
+#     post_data = {
+#         "model_id": model_id,
+#         "model_version": model_version,
+#         "algo": algo,
+#         "algo_version": algo_version,
+#         "module_id": module_id,
+#         "ts": round(ts_dt.timestamp()),
+#         "data": df2jsonlike(data_df),
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             state = resp.json()['state']
+#             if state == 0:
+#                 print(f"\nput {model_id}_{model_version}_{algo}_{algo_version}_{module_id} data success!")
+#                 return resp.json()
+#             else:
+#                 print(f"strange resp: {resp.json()}")
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_put_ai_data failed")
+#         return None
+
+
+# # 将模型所预测的数据存入数据库
+# # 能耗基线数据
+# def ai_put_ai_data2(
+#         model_id, model_version, algo, algo_version, module_id,
+#         ts_dt: datetime.datetime,
+#         data_df: pd.DataFrame,
+#         dt2ts_col=None
+# ):
+#     """
+#     :param model_id: str 模型编号
+#     :param model_version: str 模型版本
+#     :param algo: str 算法名称
+#     :param algo_version: str 算法版本
+#     :param module_id: str 模块编号
+#     :param ts_dt: datetime 业务发生时间,取用时的唯一可用时间戳
+#     :param data_df: DataFrame 业务数据
+#     :param dt2ts_col: list 业务数据中 datetime 列转换为 timestamp
+#     :return: dict: 数据服务返回值,可用 dict['state'] == 0 判断存数据是否成功
+#     """
+#     url = root_url + putaidata_url
+#     if dt2ts_col is not None:
+#         data_df = dt2ts(data_df, dt2ts_col)
+#     post_data = {
+#         "model_id"     : model_id,
+#         "model_version": model_version,
+#         "algo"         : algo,
+#         "algo_version" : algo_version,
+#         "module_id"    : module_id,
+#         "ts"           : round(ts_dt.timestamp()),
+#         "data"         : df2jsonlike(data_df) if isinstance(data_df, pd.DataFrame) else data_df,
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             state = resp.json()['state']
+#             if state == 0:
+#                 print(f"\nput {model_id}_{model_version}_{algo}_{algo_version}_{module_id} data success!")
+#                 return resp.json()
+#             else:
+#                 print(f"strange resp: {resp.json()}")
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_put_ai_data failed")
+#         return None
+
+
+# # 通过接口上传文件
+# def ai_upload_ai_file(
+#         model_id, algo, algo_version, module_id,
+#         file
+# ):
+#     """
+#     :param model_id: str 模型编号
+#     :param model_version: str 模型版本
+#     :param algo: str 算法名称
+#     :param algo_version: str 算法版本
+#     :param module_id: str 模块编号
+#     :param ts_dt: datetime 业务发生时间,取用时的唯一可用时间戳
+#     :param data_df: DataFrame 业务数据
+#     :param dt2ts_col: list 业务数据中 datetime 列转换为 timestamp
+#     :return: dict: 数据服务返回值,可用 dict['state'] == 0 判断存数据是否成功
+#     """
+#     url = upload_url + uploadaifile_url
+#     post_data = {
+#         "model_id"    : str(model_id),
+#         "algo"        : str(algo),
+#         "algo_version": str(algo_version),
+#         "module_id"   : str(module_id),
+#         "file"        : file
+#     }
+
+#     multipart_encoder = MultipartEncoder(
+#         fields=post_data
+#     )
+
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url,headers={'Content-Type': multipart_encoder.content_type}, data=multipart_encoder)
+#             state = resp.json()['state']
+#             if state == 0:
+#                 print(f"\nput {model_id}_{algo}_{algo_version}_{module_id} data success!")
+#                 return resp.json()
+#             else:
+#                 print(f"strange resp: {resp.json()}")
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#             resp.close()
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_upload_ai_file failed")
+#         return None
+
+
+# # 上传数据至点位数据库
+# def ai_add_point_data(
+#         point_id, ts, value
+# ):
+#     """
+#     :param point_id: str 数据点位,需要预先在点位库中创建好
+#     :param timestamp: str 当前时刻的时间 datetime
+#     :param value: str 点位数值
+#     :return: dict: 数据服务返回值,可用 dict['state'] == 0 判断存数据是否成功
+#     """
+#     url = root_url + addpointdatum_url
+#     post_data = [{"point_id": str(point_id),
+#                   "data":[{"ts": int(ts.timestamp()), 
+#                            "value": str(value)}]}]
+
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url,headers = {'Content-Type': 'application/json; charset=UTF-8'}, data = json.dumps(post_data))
+#             state = resp.json()['state']
+#             if state == 0:
+#                 print(f"\nput {point_id} data success!")
+#                 return resp.json()
+#             else:
+#                 print(f"strange resp: {resp.json()}")
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#             resp.close()
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_add_point_data failed")
+#         return None
+
+

+ 11 - 0
_opt/algorithm/_utils/data_summary.py

@@ -0,0 +1,11 @@
+import pandas as pd 
+
def summary_dataframe(df: pd.DataFrame, df_name: str):
    """Print a banner followed by a transposed, 2-decimal describe() of *df*."""
    display_opts = (
        'display.max_rows', None,
        'display.max_columns', None,
        'display.width', 500,
    )
    with pd.option_context(*display_opts):
        banner = '#' * 20 + f'   Data Summary : {df_name}   ' + '#' * 20
        print(banner)
        summary = df.describe().round(2).T
        print(summary)
+
def print_dataframe(df: pd.DataFrame, df_name: str):
    """Print a banner followed by the full, untruncated contents of *df*."""
    display_opts = (
        'display.max_rows', None,
        'display.max_columns', None,
        'display.width', 500,
    )
    with pd.option_context(*display_opts):
        banner = '#' * 20 + f'   Data : {df_name}   ' + '#' * 20
        print(banner)
        print(df)

+ 190 - 0
_opt/algorithm/_utils/datetime_func.py

@@ -0,0 +1,190 @@
+import datetime
+from dateutil import tz
+import time
+from functools import wraps
+import traceback
+import sys
+import threading
+
+
# Current time in the Asia/Shanghai timezone, optionally shifted back by
# whole days (used when replaying historical data locally).
def get_now(delay_day=0):
    """
    :param delay_day: int number of days to shift back; only used when
        testing against historical data
    :return: datetime (timezone-aware, Asia/Shanghai)
    """
    shanghai_now = datetime.datetime.now(tz=tz.gettz('Asia/Shanghai'))
    return shanghai_now - datetime.timedelta(days=delay_day)
+
+
# decorator: run the wrapped task once per interval on rounded time
# boundaries, logging (but surviving) any exception the task raises.
def timed_exec(intv_timed: datetime.timedelta, round_mode, sleep=60):
    """
    The decorated task must take `now` as its first parameter; the decorator
    supplies that argument (the rounded run time), so callers do not pass it.

    :param intv_timed: timedelta interval between runs
    :param round_mode: str rounding mode, one of
        {None, 'sec', 'min', 'hou', 'day', '10min', '30min'}
    :param sleep: int=60 seconds to sleep when the interval has not elapsed
        yet, or after an exception
    :return: function
    """
    def decorator(task):
        @wraps(task)
        def wrapper(*args, **kwargs):
            # Start one interval in the past so the first run fires promptly.
            now = round_time(get_now(), round_mode) - intv_timed
            while True:
                try:
                    now = round_time(time_block(now, intv_timed, sleep=sleep), round_mode)
                    task(now, *args, **kwargs)
                except Exception:
                    print()
                    print(f"{datetime.datetime.now()}:")
                    print(f"when run {now} task an error occur!")
                    print(f"Exception Info:")
                    exc_type, exc_value, exc_tb = sys.exc_info()
                    print(exc_type)
                    print(exc_value)
                    traceback.print_tb(exc_tb)
                    print()
                    time.sleep(sleep)
        return wrapper
    return decorator
+
+
# decorator: run the wrapped task once per interval on rounded time
# boundaries, handling exceptions, and passing both this run's and the
# previous run's timestamps to the task.
def timed_exec_last_now(intv_timed: datetime.timedelta, round_mode, bg_last_now='auto', sleep=60):
    """
    The decorated task must take `now, last_now` as its first two parameters
    (current and previous run time); the decorator supplies both, so callers
    do not pass them.

    :param intv_timed: timedelta interval between runs
    :param round_mode: str rounding mode, one of
        {None, 'sec', 'min', 'hou', 'day', '10min', '30min'}
    :param bg_last_now: datetime used as `last_now` on the first run;
        default='auto' subtracts intv_timed from the current rounded time
    :param sleep: int=60 seconds to sleep when the interval has not elapsed
        yet, or after an exception
    :return: function
    """
    def timed_exec_deco(func):
        @wraps(func)
        def timed_execute_wraps(*args, **kwargs):
            if bg_last_now == 'auto':
                last_now = round_time(get_now(), round_mode) - intv_timed
            else:
                last_now = bg_last_now
            # BUGFIX: `now` must exist before the first try block, otherwise
            # an exception raised by round_time/time_block on the very first
            # iteration makes the handler itself crash with NameError.
            now = last_now
            while True:
                try:
                    now = round_time(time_block(last_now, intv_timed, sleep=sleep), round_mode)
                    func(now, last_now, *args, **kwargs)
                    last_now = now
                except Exception:
                    print()
                    print(f"{datetime.datetime.now()}:")
                    print(f"when run {now} task an error occur!")
                    print(f"Exception Info:")
                    e_type, e_value, e_traceback = sys.exc_info()
                    print(e_type)
                    print(e_value)
                    traceback.print_tb(e_traceback)
                    print()
                    time.sleep(sleep)
        return timed_execute_wraps
    return timed_exec_deco
+
+
# decorator: like timed_exec, but also prints the current thread's identity
# in the error report (for tasks that run in worker threads).
def timed_exec_multhd(intv_timed: datetime.timedelta, round_mode, sleep=60):
    """
    The decorated task must take `now` as its first parameter; the decorator
    supplies that argument (the rounded run time), so callers do not pass it.

    :param intv_timed: timedelta interval between runs
    :param round_mode: str rounding mode, one of
        {None, 'sec', 'min', 'hou', 'day', '10min', '30min'}
    :param sleep: int=60 seconds to sleep when the interval has not elapsed
        yet, or after an exception
    :return: function
    """
    def timed_exec_deco(func):
        @wraps(func)
        def timed_execute_wraps(*args, **kwargs):
            now = round_time(get_now(), round_mode) - intv_timed
            while True:
                try:
                    now = round_time(time_block(now, intv_timed, sleep=sleep), round_mode)
                    func(now, *args, **kwargs)
                except Exception:
                    # BUGFIX: threading.currentThread() is a deprecated alias
                    # removed in Python 3.12; use current_thread() instead.
                    # NOTE(review): _target/_args are private Thread fields;
                    # kept to preserve the original diagnostic output.
                    thread_curr = threading.current_thread()
                    print()
                    print(f"{datetime.datetime.now()}:")
                    print(f"In {thread_curr.name} (target: {thread_curr._target}, args: {thread_curr._args})")
                    print(f"when run {now} task an error occur!")
                    print(f"Exception Info:")
                    e_type, e_value, e_traceback = sys.exc_info()
                    print(e_type)
                    print(e_value)
                    traceback.print_tb(e_traceback)
                    print()
                    time.sleep(sleep)
        return timed_execute_wraps
    return timed_exec_deco
+
+
# Floor a datetime down to the requested granularity.
def round_time(t_: datetime.datetime, lvl=None):
    """
    :param t_: datetime
    :param lvl: str=None rounding mode, currently one of
        {None, 'sec', 'min', 'hou', 'day', '10min', '30min'}
    :return: datetime, or the string "No support" for an unknown lvl
    """
    if lvl is None:
        return t_
    # Dispatch to the module-level helper named round_<lvl>, if one exists.
    handler = globals().get("round_" + lvl)
    if handler is None:
        return "No support"
    return handler(t_)
+
+
def round_sec(t_: datetime.datetime):
    # Drop the sub-second part.
    return t_.replace(microsecond=0)


def round_min(t_: datetime.datetime):
    # Floor to the whole minute.
    return t_.replace(second=0, microsecond=0)


def round_hou(t_: datetime.datetime):
    # Floor to the whole hour.
    return t_.replace(minute=0, second=0, microsecond=0)


def round_day(t_: datetime.datetime):
    # Floor to midnight.
    return t_.replace(hour=0, minute=0, second=0, microsecond=0)


def round_10min(t_: datetime.datetime):
    # Floor to the 10-minute boundary at or below t_.
    return t_.replace(minute=t_.minute - t_.minute % 10, second=0, microsecond=0)


def round_30min(t_: datetime.datetime):
    # Floor to :00 or :30, whichever is at or below t_.
    return t_.replace(minute=0 if t_.minute < 30 else 30, second=0, microsecond=0)
+
+
# Half-finished time comparison helper: has at least `timed_thre` elapsed
# between `st` and `t_`?
def time_check(t_, st, timed_thre):
    return t_ - st >= timed_thre
+
+
# Block (polling, sleeping between checks) until the current time is at
# least `timed_thre` past `t_`; then release and return the current time.
def time_block(t_: datetime.datetime, timed_thre: datetime.timedelta, sleep=1):
    # `sleep` is the polling interval in seconds; get_now() returns an
    # Asia/Shanghai timezone-aware datetime, so `t_` must be aware too.
    while True:
        now = get_now()
        if now - t_ >= timed_thre:
            return now
        else:
            time.sleep(sleep)

+ 229 - 0
_opt/algorithm/_utils/point_io.py

@@ -0,0 +1,229 @@
+from datetime import datetime
+import time
+
+try:
+    from workflowlib import requests
+except:
+    import requests
+
+import os
+import json
+import numpy as np
+import pandas as pd 
+
+
+
class PointReader:
    """Reads point (tag) time-series data from the data-service HTTP API.

    :param point_ids: list of point ids to fetch
    :param dt_begin: default start of the query window
    :param dt_end: default end of the query window
    :param url: full endpoint URL of the data service
    """

    def __init__(
        self,
        point_ids: list,
        dt_begin : datetime,
        dt_end   : datetime,
        url      : str
    ) -> None:

        self.point_ids = point_ids
        self.dt_begin  = dt_begin
        self.dt_end    = dt_end
        self.url       = url

    def read_current(self) -> pd.DataFrame:
        """Fetch the latest value of every configured point.

        :return: one-row DataFrame (index: minute-floored timestamp,
            columns: point ids); ids missing from the response become NaN
            columns, an empty response yields an all-NaN row stamped now()
        :raises Exception: when the HTTP request or JSON decoding fails
        """
        post_data = {'point_ids': self.point_ids}
        try:
            res = requests.post(url=self.url, data=json.dumps(post_data)).json()
            res_state = res['state']
        except Exception as exc:
            # BUGFIX: the original bare `except` printed `res` (unbound when
            # requests.post itself failed) and then crashed with NameError on
            # `res_state`; re-raise with the real cause instead.
            print('url', self.url)
            raise Exception(f'read_current request failed: {exc}') from exc

        if res_state != 0:
            print('post_data', post_data)

        if len(res['data']) != 0:
            point_id    = [_['point_id'] for _ in res['data']]
            point_value = [_['value'] for _ in res['data']]
            point_ts    = [_['ts'] for _ in res['data']]

            data = pd.DataFrame(
                data    = [point_value],
                columns = point_id,
                index   = [point_ts[0]]
            )
            # Server timestamps are epoch seconds (UTC); convert to naive
            # Asia/Shanghai wall-clock time, floored to the minute.
            data.index = pd.to_datetime(data.index, unit='s', utc=True).tz_convert('Asia/Shanghai')
            data       = data.tz_localize(tz=None)
            data.index = data.index.floor('min')
        else:
            data = pd.DataFrame(
                data    = np.zeros(shape=[1, len(self.point_ids)]) * np.nan,
                columns = self.point_ids,
                index   = [datetime.now()]
            )
            data.index = data.index.floor('min')

        # Points that do not exist (including never-stored ones) are absent
        # from the response; reindex so every requested id becomes a column.
        data = data.reindex(columns=self.point_ids)

        return data

    def _read(self, dt_begin=None, dt_end=None) -> pd.DataFrame:
        """Fetch minute-resolution history for one [dt_begin, dt_end] window.

        Falls back to self.dt_begin / self.dt_end when not given; seconds
        and microseconds are truncated before querying.

        :raises Exception: inverted window, request failure, or a non-zero
            service state
        """
        dt_begin = self.dt_begin if dt_begin is None else dt_begin
        dt_begin = dt_begin.replace(second=0, microsecond=0)
        dt_end   = self.dt_end if dt_end is None else dt_end
        dt_end   = dt_end.replace(second=0, microsecond=0)

        if dt_begin > dt_end:
            raise Exception('开始时间晚于起始时间')

        post_data = {
            "point_ids": self.point_ids,
            "begin"    : round(dt_begin.timestamp()),
            "end"      : round(dt_end.timestamp()),
            "interval" : 1,
            "type"     : 3,
        }

        try:
            res = requests.post(url=self.url, data=json.dumps(post_data))
            time.sleep(0.1)  # brief pause to avoid hammering the service
            res_json  = res.json()
            res_state = res_json['state']
        except Exception as exc:
            # BUGFIX: the original bare `except` referenced `res`/`res.json()`
            # which are unbound when requests.post itself fails, hiding the
            # real error behind a NameError; chain the original exception.
            print('post_data', post_data)
            print('url', self.url)
            raise Exception(f'_read request failed: {exc}') from exc

        if res_state != 0:
            print('post_data', post_data)
            raise Exception(res_json)

        point_df = []
        for point_info in res_json['data']:
            point_id   = point_info['point_id']
            point_data = point_info['data']

            if (point_data is None) or (len(point_data) == 0):
                # No samples for this point: emit an all-NaN minute grid so
                # the concat/reindex below still lines up.
                point_df_index = pd.Index(
                    data = pd.date_range(start=dt_begin, end=dt_end, freq='1min').to_pydatetime().tolist(),
                    name = 'ts'
                )
                df = pd.DataFrame({point_id: np.nan}, index=point_df_index)
            else:
                df       = pd.DataFrame(point_data).rename(columns={'value': point_id}).set_index('ts')
                df.index = pd.to_datetime(df.index, unit='s', utc=True).tz_convert('Asia/Shanghai')
                df       = df.tz_localize(tz=None)

            point_df.append(df)

        check_df_missing(point_df, post_data, res)
        data = pd.concat(point_df, axis=1).reindex(self.point_ids, axis=1)

        return data

    def read_interval(self) -> pd.DataFrame:
        """Fetch [dt_begin, dt_end] in daily chunks and concatenate.

        Duplicate timestamps keep the last occurrence; the result is sorted
        by time.
        """
        interval = pd.date_range(start=self.dt_begin, end=self.dt_end, freq='1D').to_pydatetime().tolist()
        interval += [self.dt_end]

        data = []
        for idx in range(len(interval) - 1):
            start      = interval[idx]
            end        = interval[idx + 1]
            finish_pct = round((idx + 1) / (len(interval) - 1) * 100, 2)
            print(f'获取第{idx}段数据({finish_pct}%),开始时间:{interval[idx]},结束时间:{interval[idx+1]}')

            data.append(self._read(dt_begin=start, dt_end=end))
        data = pd.concat(data, axis=0)
        data = data.loc[~data.index.duplicated(keep='last'), :].sort_index()

        return data

    def read_int_interval(self, freq='H') -> pd.DataFrame:
        """Fetch single-timestamp snapshots on every whole hour or day.

        :param freq: 'H' for hourly snapshots, 'D' for daily snapshots
        :raises Exception: unsupported freq (BUGFIX: the original fell
            through with UnboundLocalError on `start`), or an empty grid
        """
        if freq == 'H':
            start = self.dt_begin.replace(minute=0)
            end   = self.dt_end.replace(minute=0)
        elif freq == 'D':
            start = self.dt_begin.replace(hour=0, minute=0)
            end   = self.dt_end.replace(hour=0, minute=0)
        else:
            raise Exception(f'不支持的freq:{freq},仅支持H或D')

        start        = start.replace(second=0, microsecond=0)
        end          = end.replace(second=0, microsecond=0)
        int_interval = pd.date_range(start=start, end=end, freq=freq).to_pydatetime()

        if len(int_interval) == 0:
            raise Exception(f'在指定的日期范围下没有获取到对应的时间点(start:{start},end:{end},freq:{freq})')

        data = []
        for idx, dt in enumerate(int_interval):
            data.append(self._read(dt_begin=dt, dt_end=dt))
            finish_pct = round((idx + 1) / (len(int_interval)) * 100, 2)
            print(f'获取第{idx}段数据({finish_pct}%),时间:{dt}')

        data = pd.concat(data, axis=0)
        data = data.loc[~data.index.duplicated(keep='last'), :].sort_index()

        return data
+    
def check_df_missing(point_df, post_data, res) -> None:
    """Print diagnostics when fetched point data is entirely missing.

    :param point_df: list of per-point DataFrames (one column each)
    :param post_data: the request payload, printed for debugging
    :param res: raw HTTP response (currently unused; kept for signature
        compatibility with callers)
    """
    is_df_nan     = [df.isna().all().all() for df in point_df]
    is_all_df_nan = all(is_df_nan)
    is_any_df_nan = any(is_df_nan)
    if is_all_df_nan or is_any_df_nan:
        print('post_data', post_data)
        # print('res.json',res.json())
    # BUGFIX: the original printed BOTH messages when everything was missing
    # ("all" implies "any"), which is contradictory; report exactly one.
    if is_all_df_nan:
        print('【所有点位】的【所有时间段】数据均未获取到, 需检查点位或接口')
    elif is_any_df_nan:
        print('【部分点位】的【所有时间段】数据均未获取到, 需检查点位或接口')
+
+
class PointWriter:
    """Uploads point time-series samples to the data-service HTTP API."""

    def __init__(self) -> None:
        # NOTE(review): if DATA_UPLOAD_URL is unset this silently becomes the
        # literal prefix 'None' -- confirm the env var is always provided.
        self.url = f'{os.environ.get("DATA_UPLOAD_URL")}ai/addpointdatum'

    def ai_add_point_data(
        self,
        point_id: str,
        ts      : list,
        value   : list
    ):
        """Upload (ts, value) samples for a single point.

        :param point_id: point identifier (must already exist server-side)
        :param ts: list of datetime timestamps
        :param value: list of values; each is converted to str for upload
        :return: dict server response, or None when there is nothing to send
        :raises Exception: when the server answers with a non-zero state
        """
        # BUGFIX: also skip when the sample list is empty, not only when the
        # point id is empty -- posting an empty data list is pointless.
        if len(point_id) == 0 or len(ts) == 0:
            print('无数据写入')
            return

        samples = [
            {'ts': int(ts_i.timestamp()), 'value': str(value_i)}
            for ts_i, value_i in zip(ts, value)
        ]
        post_data = json.dumps([{'point_id': point_id, 'data': samples}])

        resp = requests.post(
            url     = self.url,
            headers = {'Content-Type': 'application/json; charset=UTF-8'},
            data    = post_data
        )
        try:
            result = resp.json()
        finally:
            # BUGFIX: the original never closed the response object.
            resp.close()

        state = result['state']
        if state == 0:
            print(f"\nput {point_id} data success!")
        else:
            print(result)
            print(post_data)
            raise Exception('ai_add_point_data failed')

        return result

+ 185 - 0
_opt/algorithm/_utils/point_reader.py

@@ -0,0 +1,185 @@
+from datetime import datetime
+import time
+
+try:
+    from workflowlib import requests
+except:
+    import requests
+
+import json
+import numpy as np
+import pandas as pd 
+
+
+
class PointReader:
    """Reads point (tag) time-series data from the data-service HTTP API.

    :param point_ids: list of point ids to fetch
    :param dt_begin: default start of the query window
    :param dt_end: default end of the query window
    :param url: full endpoint URL of the data service
    """

    def __init__(
        self,
        point_ids: list,
        dt_begin : datetime,
        dt_end   : datetime,
        url      : str
    ) -> None:

        self.point_ids = point_ids
        self.dt_begin  = dt_begin
        self.dt_end    = dt_end
        self.url       = url

    def read_current(self) -> pd.DataFrame:
        """Fetch the latest value of every configured point.

        :return: one-row DataFrame (index: minute-floored timestamp,
            columns: point ids); ids missing from the response become NaN
            columns, an empty response yields an all-NaN row stamped now()
        :raises Exception: when the HTTP request or JSON decoding fails
        """
        post_data = {'point_ids': self.point_ids}
        try:
            res = requests.post(url=self.url, data=json.dumps(post_data)).json()
            res_state = res['state']
        except Exception as exc:
            # BUGFIX: the original bare `except` printed `res` (unbound when
            # requests.post itself failed) and then crashed with NameError on
            # `res_state`; re-raise with the real cause instead.
            print('url', self.url)
            raise Exception(f'read_current request failed: {exc}') from exc

        if res_state != 0:
            print('post_data', post_data)

        if len(res['data']) != 0:
            point_id    = [_['point_id'] for _ in res['data']]
            point_value = [_['value'] for _ in res['data']]
            point_ts    = [_['ts'] for _ in res['data']]

            data = pd.DataFrame(
                data    = [point_value],
                columns = point_id,
                index   = [point_ts[0]]
            )
            # Server timestamps are epoch seconds (UTC); convert to naive
            # Asia/Shanghai wall-clock time, floored to the minute.
            data.index = pd.to_datetime(data.index, unit='s', utc=True).tz_convert('Asia/Shanghai')
            data       = data.tz_localize(tz=None)
            data.index = data.index.floor('min')
        else:
            data = pd.DataFrame(
                data    = np.zeros(shape=[1, len(self.point_ids)]) * np.nan,
                columns = self.point_ids,
                index   = [datetime.now()]
            )
            data.index = data.index.floor('min')

        # Points that do not exist (including never-stored ones) are absent
        # from the response; reindex so every requested id becomes a column.
        data = data.reindex(columns=self.point_ids)

        return data

    def _read(self, dt_begin=None, dt_end=None) -> pd.DataFrame:
        """Fetch minute-resolution history for one [dt_begin, dt_end] window.

        Falls back to self.dt_begin / self.dt_end when not given; seconds
        and microseconds are truncated before querying.

        :raises Exception: inverted window, request failure, or a non-zero
            service state
        """
        dt_begin = self.dt_begin if dt_begin is None else dt_begin
        dt_begin = dt_begin.replace(second=0, microsecond=0)
        dt_end   = self.dt_end if dt_end is None else dt_end
        dt_end   = dt_end.replace(second=0, microsecond=0)

        if dt_begin > dt_end:
            raise Exception('开始时间晚于起始时间')

        post_data = {
            "point_ids": self.point_ids,
            "begin"    : round(dt_begin.timestamp()),
            "end"      : round(dt_end.timestamp()),
            "interval" : 1,
            "type"     : 3,
        }

        try:
            res = requests.post(url=self.url, data=json.dumps(post_data))
            time.sleep(0.1)  # brief pause to avoid hammering the service
            res_json  = res.json()
            res_state = res_json['state']
        except Exception as exc:
            # BUGFIX: the original bare `except` referenced `res`/`res.json()`
            # which are unbound when requests.post itself fails, hiding the
            # real error behind a NameError; chain the original exception.
            print('post_data', post_data)
            print('url', self.url)
            raise Exception(f'_read request failed: {exc}') from exc

        if res_state != 0:
            print('post_data', post_data)
            raise Exception(res_json)

        point_df = []
        for point_info in res_json['data']:
            point_id   = point_info['point_id']
            point_data = point_info['data']

            if (point_data is None) or (len(point_data) == 0):
                # No samples for this point: emit an all-NaN minute grid so
                # the concat/reindex below still lines up.
                point_df_index = pd.Index(
                    data = pd.date_range(start=dt_begin, end=dt_end, freq='1min').to_pydatetime().tolist(),
                    name = 'ts'
                )
                df = pd.DataFrame({point_id: np.nan}, index=point_df_index)
            else:
                df       = pd.DataFrame(point_data).rename(columns={'value': point_id}).set_index('ts')
                df.index = pd.to_datetime(df.index, unit='s', utc=True).tz_convert('Asia/Shanghai')
                df       = df.tz_localize(tz=None)

            point_df.append(df)

        check_df_missing(point_df, post_data, res)
        data = pd.concat(point_df, axis=1).reindex(self.point_ids, axis=1)

        return data

    def read_interval(self) -> pd.DataFrame:
        """Fetch [dt_begin, dt_end] in daily chunks and concatenate.

        Duplicate timestamps keep the last occurrence; the result is sorted
        by time.
        """
        interval = pd.date_range(start=self.dt_begin, end=self.dt_end, freq='1D').to_pydatetime().tolist()
        interval += [self.dt_end]

        data = []
        for idx in range(len(interval) - 1):
            start      = interval[idx]
            end        = interval[idx + 1]
            finish_pct = round((idx + 1) / (len(interval) - 1) * 100, 2)
            print(f'获取第{idx}段数据({finish_pct}%),开始时间:{interval[idx]},结束时间:{interval[idx+1]}')

            data.append(self._read(dt_begin=start, dt_end=end))
        data = pd.concat(data, axis=0)
        data = data.loc[~data.index.duplicated(keep='last'), :].sort_index()

        return data

    def read_int_interval(self, freq='H') -> pd.DataFrame:
        """Fetch single-timestamp snapshots on every whole hour or day.

        :param freq: 'H' for hourly snapshots, 'D' for daily snapshots
        :raises Exception: unsupported freq (BUGFIX: the original fell
            through with UnboundLocalError on `start`), or an empty grid
        """
        if freq == 'H':
            start = self.dt_begin.replace(minute=0)
            end   = self.dt_end.replace(minute=0)
        elif freq == 'D':
            start = self.dt_begin.replace(hour=0, minute=0)
            end   = self.dt_end.replace(hour=0, minute=0)
        else:
            raise Exception(f'不支持的freq:{freq},仅支持H或D')

        start        = start.replace(second=0, microsecond=0)
        end          = end.replace(second=0, microsecond=0)
        int_interval = pd.date_range(start=start, end=end, freq=freq).to_pydatetime()

        if len(int_interval) == 0:
            raise Exception(f'在指定的日期范围下没有获取到对应的时间点(start:{start},end:{end},freq:{freq})')

        data = []
        for idx, dt in enumerate(int_interval):
            data.append(self._read(dt_begin=dt, dt_end=dt))
            finish_pct = round((idx + 1) / (len(int_interval)) * 100, 2)
            print(f'获取第{idx}段数据({finish_pct}%),时间:{dt}')

        data = pd.concat(data, axis=0)
        data = data.loc[~data.index.duplicated(keep='last'), :].sort_index()

        return data
+    
def check_df_missing(point_df, post_data, res) -> None:
    """Print diagnostics when fetched point data is entirely missing.

    :param point_df: list of per-point DataFrames (one column each)
    :param post_data: the request payload, printed for debugging
    :param res: raw HTTP response (currently unused; kept for signature
        compatibility with callers)
    """
    is_df_nan     = [df.isna().all().all() for df in point_df]
    is_all_df_nan = all(is_df_nan)
    is_any_df_nan = any(is_df_nan)
    if is_all_df_nan or is_any_df_nan:
        print('post_data', post_data)
        # print('res.json',res.json())
    # BUGFIX: the original printed BOTH messages when everything was missing
    # ("all" implies "any"), which is contradictory; report exactly one.
    if is_all_df_nan:
        print('【所有点位】的【所有时间段】数据均未获取到, 需检查点位或接口')
    elif is_any_df_nan:
        print('【部分点位】的【所有时间段】数据均未获取到, 需检查点位或接口')

+ 46 - 0
_opt/algorithm/_utils/wf_cache.py

@@ -0,0 +1,46 @@
+from datetime import datetime
+from workflowlib.utils import cache as workflow_cache
+
class WfCache:
    """(De)serialises scalar values as strings for the workflow cache.

    Supported value types: str, int, float, datetime and None (stored as
    the empty string).
    """

    # Format used to round-trip datetime values through the cache.
    datetime_fmt = "%Y-%m-%d %H:%M:%S"

    def __init__(self) -> None:
        return

    def convert_data_to_str(self, data):
        """Serialise *data* to the string form stored in the cache.

        :raises TypeError: for unsupported value types (BUGFIX: the original
            left ``str_data`` unbound and crashed with UnboundLocalError)
        """
        if isinstance(data, (str, int, float)):
            return str(data)
        if isinstance(data, datetime):
            return datetime.strftime(data, self.datetime_fmt)
        if data is None:
            return ''
        raise TypeError(f'unsupported cache value type: {type(data).__name__}')

    def convert_str_to_data(self, str_data, data_type):
        """Parse a cached string back into a value of *data_type*.

        :param data_type: one of 'int', 'float', 'str', 'datetime'
        :return: parsed value, or None for empty/missing cache entries
        :raises ValueError: for an unknown *data_type* (BUGFIX: previously
            an UnboundLocalError)
        """
        # Empty string is the serialised form of None; a missing cache key
        # also yields None.
        if str_data == '' or str_data is None:
            return None

        if data_type == 'int':
            return int(str_data)
        if data_type == 'float':
            return float(str_data)
        if data_type == 'str':
            return str_data
        if data_type == 'datetime':
            return datetime.strptime(str_data, self.datetime_fmt)
        raise ValueError(f'unsupported data_type: {data_type!r}')

    def read(self, key, data_type):
        """Read *key* from the workflow cache and parse it as *data_type*."""
        str_data = workflow_cache.get(key)
        return self.convert_str_to_data(str_data, data_type)

    def write(self, key, data):
        """Serialise *data* and store it in the workflow cache under *key*."""
        workflow_cache.set(key, self.convert_data_to_str(data))

+ 134 - 0
_opt/algorithm/_utils/wf_file_io.py

@@ -0,0 +1,134 @@
+import os 
+
+import numpy as np
+from typing import Union
+from datetime import datetime
+
+import pandas as pd 
+
+DIR_PATH = '/mnt/workflow_data'
+
def write_file(
    data      : Union[pd.DataFrame,dict,None],
    file_name : str,
    keep_n    : Union[int,None] = None,
    keep_index: bool = True,
) -> None:
    """Append DataFrame snapshot(s) to CSV files in long (melted) format.

    :param data: a single DataFrame, a dict of data-name -> DataFrame
        (non-DataFrame values are skipped), or None (no-op)
    :param file_name: base name passed to get_file_path to build the path
    :param keep_n: if an int, only rows from the `keep_n` most recent write
        timestamps are retained in the file
    :param keep_index: when True the original index survives the melt as an
        extra variable named '_raw_index'
    :raises Exception: for any other `data` type
    """

    def write(
        file_path: str,
        df       : pd.DataFrame,
        time     : datetime
    ):
        # Keep the caller's index as a regular '_raw_index' column so it
        # survives the melt, or drop it entirely.
        if keep_index:
            df = df.rename_axis(index='_raw_index').reset_index(drop=False)
        else:
            df = df.reset_index(drop=True)
        
        # Long format: one row per (row-position, column) pair, all stamped
        # with the shared write time so snapshots can be told apart later.
        current_df = (
            df
            .assign(_index=np.arange(len(df)))
            .melt(id_vars='_index',var_name='variable',value_name='values')
            .assign(write_ts=time)
        )
        
        # Append to any snapshots already stored in the file.
        if os.path.exists(file_path):
            exist_df = pd.read_csv(file_path)
            save_df  = pd.concat([exist_df,current_df],axis=0)
        else:
            save_df = current_df
        
        save_df.write_ts = pd.to_datetime(save_df.write_ts)
        
        if isinstance(keep_n,int):
            # Oldest timestamp among the `keep_n` most recent snapshots;
            # everything written before it is discarded.
            keep_time = save_df.write_ts.sort_values().drop_duplicates().nlargest(keep_n).nsmallest(1).iat[0]
            save_df   = save_df.loc[lambda dt:dt.write_ts>= keep_time]
            print(f'仅保留{keep_time}以后的数据')
        
        save_df.to_csv(file_path,index=False)
        print(f'文件已保存:{file_path}')
    
    # One shared timestamp for every DataFrame written in this call.
    now = datetime.now()
    
    if isinstance(data,pd.DataFrame):
        file_path = get_file_path(file_name=file_name,data_name=None)
        write(file_path=file_path,df=data,time=now)
    
    elif isinstance(data,dict):
        for df_name,df in data.items():
            if not isinstance(df,pd.DataFrame):
                continue
            file_path = get_file_path(file_name=file_name,data_name=df_name)
            write(file_path=file_path,df=df,time=now)
        
    elif data is None:
        return None
    
    else:
        raise Exception('Wrong Result Type')
+
def write_file_simple(data: pd.DataFrame, file_name: str, path_name=None, overwrite=False):
    """Write *data* to `<path_name>/<file_name>.csv` (path_name defaults to
    DIR_PATH), appending to an existing file unless *overwrite* is True."""
    target = f'{path_name or DIR_PATH}/{file_name}.csv'
    append = os.path.exists(target) and not overwrite
    if append:
        previous = pd.read_csv(target)
        out = pd.concat([previous, data], axis=0)
    else:
        out = data
    out.to_csv(target, index=False)
+
def read_file(
    file_name,
    data_name,
    file_path      = None,
    write_ts_start = None,
    write_ts_end   = None,
) -> pd.DataFrame:
    """Load a CSV written by write_file back into wide format.

    The result is indexed by ('_raw_index', 'write_ts'); the optional
    write_ts range restricts which snapshots are returned (defaults to the
    whole file). When *file_path* is None it is derived from
    (file_name, data_name) via get_file_path.
    """
    if file_path is None:
        file_path = get_file_path(file_name=file_name, data_name=data_name)

    raw = pd.read_csv(file_path)
    raw['write_ts'] = pd.to_datetime(raw['write_ts'])

    # Un-melt: one column per variable, indexed by write timestamp.
    wide = (
        raw
        .pivot(index=['write_ts', '_index'], columns='variable', values='values')
        .reset_index(level=1, drop=True)
        .rename_axis(columns=None)
    )

    start = wide.index.min() if write_ts_start is None else write_ts_start
    end   = wide.index.max() if write_ts_end is None else write_ts_end
    selected = wide[start:end]

    return selected.reset_index(drop=False).set_index(['_raw_index', 'write_ts'])
+
+
def get_file_path(file_name, data_name) -> str:
    """Build the CSV path for (file_name, data_name) under DIR_PATH.

    The workflow-type code (see get_workflow_type) is appended to the file
    name so different runtime environments do not overwrite each other.
    When data_name is given, the file lives in a per-file_name sub-folder
    which is created on demand.

    :param file_name: base name of the output file
    :param data_name: sub-data name, or None for a single flat CSV
    :return: str file path
    """
    wf_type = get_workflow_type()
    if data_name is None:
        file_path = f'{DIR_PATH}/{file_name}_{wf_type}.csv'
    else:
        folder_name = f'{file_name}_{wf_type}'
        file_path   = f'{DIR_PATH}/{folder_name}/{data_name}.csv'
        # Create the sub-folder when needed; exist_ok avoids the race between
        # the original os.path.exists check and os.makedirs.
        os.makedirs(os.path.join(DIR_PATH, folder_name), exist_ok=True)
    return file_path
+        
def get_workflow_type() -> str:
    """Detect the workflow runtime environment from well-known env vars.

    Mapping (last match wins; 'UNKNOW' when none is set):
      SIMULATOR_JOB_ID           -> 'FZ' (simulation)
      OPTIM_CALC_LOG_ID          -> 'HS' (optimisation recalculation)
      OPTIM_JOB_ID               -> 'YH' (live optimisation)
      JOB_SCHEDULE_CALC_TIME_NOW -> 'RW' (scheduled job)
      WORKFLOW_DEBUG             -> 'TS' (debugging)

    NOTE(review): the original comment also listed MONITOR_TYPE (monitor
    environment) but no code checks it -- confirm whether that is intended.
    """
    env_to_code = {
        'SIMULATOR_JOB_ID'          : 'FZ',
        'OPTIM_CALC_LOG_ID'         : 'HS',
        'OPTIM_JOB_ID'              : 'YH',
        'JOB_SCHEDULE_CALC_TIME_NOW': 'RW',
        'WORKFLOW_DEBUG'            : 'TS',
    }
    wf_type = 'UNKNOW'
    # Insertion order matches the original list, so the last matching
    # variable still wins.
    for env_name, code in env_to_code.items():
        if env_name in os.environ:
            wf_type = code
    return wf_type

+ 67 - 0
_opt/algorithm/constrains/build.py

@@ -0,0 +1,67 @@
+from .._utils.config_info import ConfigInfo
+
def main(config: dict) -> list:
    """Collect every constraint expression declared in *config*.

    Combines range constraints, dynamic-change constraints and free-form
    formula constraints into one flat list, printing them for inspection
    before returning.
    """
    info = ConfigInfo(config)
    range_cfg   = info.get_property('constrains_range', default=None)
    dyn_cfg     = info.get_property('constrains_dyn', default=None)
    formula_cfg = info.get_property('constrains', default=None)

    all_constrains = (
        get_range_constrains(range_cfg)
        + get_dyn_change_constrains(dyn_cfg)
        + get_formula_constrains(formula_cfg)
    )

    print('-' * 20 + '约束条件' + '-' * 20)
    for idx, constrain in enumerate(all_constrains):
        print('-' * 20 + str(idx) + '-' * 20)
        print(constrain)

    return all_constrains
+
def get_range_constrains(range_constrains: list):
    """Build "<0"-style bound constraints from per-variable ub/lb entries.

    Each entry is a dict with 'var_name' and optional 'ub'/'lb' bounds;
    missing or empty-string bounds are skipped. A None input yields [].
    """
    if range_constrains is None:
        return []

    constrains = []
    for entry in range_constrains:
        placeholder = '{' + entry['var_name'] + '}'
        upper = entry.get('ub')
        lower = entry.get('lb')

        if upper is not None and upper != '':
            constrains.append(f'#表单边界性约束# {placeholder}-({upper})<0')
        if lower is not None and lower != '':
            constrains.append(f'#表单边界性约束# ({lower})-{placeholder}<0')

    return constrains
+
def get_dyn_change_constrains(config):
    """Placeholder for optimisation-step change constraints; returns [].

    Design notes (all changes measured between before/after optimisation):
      - target variable change            d_p
      - target state value                S   (0/1)
      - environment state change          d_S (0/1)

    When an environment point changes, the target point is allowed a wide
    change range (c_w), otherwise only a narrow one (c_n):
        d_S * (c_w - c_n) + c_n < d_p < d_S * (c_w - c_n) + c_n

    When the target point's state is 0, allow the widest range so the
    constraint does not shrink the feasible set unnecessarily:
        D_s = d_S + (1 - S) * 100
        D_s * (c_w - c_n) + c_n < d_p < D_s * (c_w - c_n) + c_n

    Not implemented yet; *config* is currently ignored.
    """
    return []
+
def get_formula_constrains(constrains_fml: list):
    """Extract the raw 'constrain' strings from formula-constraint entries.

    None entries in the list are skipped; a None list yields [].
    """
    if constrains_fml is None:
        return []
    return [entry['constrain'] for entry in constrains_fml if entry is not None]

+ 51 - 0
_opt/algorithm/constrains/execute.py

@@ -0,0 +1,51 @@
+import re 
+import numpy as np
+import pandas as pd
+
def execute_constrains(data: pd.DataFrame, cst: str) -> np.ndarray:
    """Evaluate one constraint expression against *data*; returns (n, 1).

    Two syntaxes are supported:
      * "{col}" placeholders: substituted with ``data.col`` and run through
        ``eval`` together with the helper functions defined below;
      * plain expressions: delegated to ``pd.DataFrame.eval``.
    """

    # ---- helpers available to the eval'ed constraint string -----------

    def threshold_min(*data, threshold):
        # Row-wise minimum over values strictly greater than `threshold`;
        # rows with no such value fall back to `threshold` itself.
        stacked = np.column_stack(data)
        masked  = np.where(stacked <= threshold, np.nan, stacked)
        row_min = np.nanmin(masked, axis=1)
        return np.where(np.isnan(row_min), threshold, row_min)

    def max(*data):
        # Element-wise maximum across all passed arrays (shadows the builtin
        # on purpose so constraint strings can call `max(...)`).
        return np.max(data, axis=0)

    # -------------------------------------------------------------------

    placeholder_vars = re.findall(r'{(\w+)}', cst)
    if placeholder_vars:
        for name in placeholder_vars:
            cst = cst.replace('{' + name + '}', f'data.{name}')

        try:
            # NOTE: eval on a config-supplied string -- constraint strings
            # must come from trusted configuration only.
            raw = eval(cst)
        except Exception as E:
            print(f'执行该约束时有误:{cst},以下为输入的数据')
            with pd.option_context('display.max_columns', None):
                print(data)
            raise E

        result = np.array(raw)
    else:
        result = data.eval(cst).to_numpy()

    return result.reshape(-1, 1)
+
if __name__ == '__main__':
    # Smoke test: exercises the plain-expression branch (no "{...}"
    # placeholders); should print a column vector of |x - y| values.
    res1 = execute_constrains(
        pd.DataFrame({'x':[1,2,3],'y':[4,5,6]}),
        cst='abs(x-y)'
    )
    print(res1)

+ 98 - 0
_opt/algorithm/constrains/parse.py

@@ -0,0 +1,98 @@
+import re
+import numpy as np
+import pandas as pd
+
def parse_constrains(constrains:list,data:pd.DataFrame) -> list:
    """Resolve the special tokens in raw constraint strings into
    executable expressions.

    Special tokens:
        - ``[abc]``  the model's initial value of variable ``abc``
                     (covers both model inputs and outputs)
        - ``<1~10>`` expands one template into multiple numbered points

    Parameters
    ----------
    constrains : list
        Raw constraint strings.
    data : pd.DataFrame
        Data the constraints refer to; only the first row is read.

    Returns
    -------
    list
        Parsed constraints, or None when *constrains* is None.

    Raises
    ------
    Exception
        - a constraint does not end with "<0"
        - a constraint contains a stray '#'
        - a referenced variable is missing from *data*
        - a referenced variable's value is missing (NaN)
    """
    
    if constrains is None:
        return None
    
    if data.columns.has_duplicates:
        col     = data.columns
        dup_col = col[col.duplicated()]
        raise Exception(f'约束的变量中存在重复名称,需检查系统模型的输入及输出,以及外部变量{dup_col}')
    
    # Expand <a~b> range templates first
    constrains = multiple_constrains(constrains)
    
    # Loop-invariant: only the first row is ever read, hoist out of the loop
    current_data = data.reset_index(drop=True)
    current_col  = current_data.columns.to_list()
    
    parsed_con = []
    for con in constrains:
        
        # Strip whitespace and '#...#' comments (Chinese punctuation allowed)
        con = con.replace(' ','')
        chinese_str = r'\u3002\uff1b\uff0c\uff1a\u201c\u201d\uff08\uff09\u3001\uff1f\u300a\u300b'
        all_comment = re.findall(f'#[\\w{chinese_str}]+#',con)
        for comment in all_comment:
            con = con.replace(comment,'')
        if '#' in con:
            raise Exception(f'检测到多余的符号"#", 检查约束条件是否正确( {con} )')
        
        # Every constraint must be normalized to the "...<0" form
        if con[-2:] != '<0':
            raise Exception(f'检测到无效的约束条件{con},约束条件应以<0结尾,修改成"{con}<0"')
        # BUG FIX: the original used con.replace('<0',''), which also
        # mangled interior occurrences such as 'a<0.5'; strip only the
        # trailing marker.
        con = con[:-2]
        
        # Substitute [var] tokens with their current value (case sensitive)
        current_var = re.findall(r'\[(\w+)\]',con)
        for var in current_var:
            
            if var not in current_col:
                raise Exception(f'系统模型的输入和输出中未能找到{var},以下是当前系统模型的参数{current_col}')
            
            current_value = current_data.at[0,var]
            # pd.isna also copes with non-float values, unlike np.isnan
            if pd.isna(current_value):
                raise Exception(f'约束变量{var}的约束值为缺失值,请检查系统模型的输出')
            
            con = con.replace(f'[{var}]',str(current_value))
        
        parsed_con.append(con)
        
    return parsed_con
+
def multiple_constrains(constrains:list):
    """Expand '<start~end>' range templates.

    A constraint containing e.g. '<1~3>' is duplicated for 1, 2 and 3
    with the token replaced by each number. At most one distinct range
    token is allowed per constraint.

    Raises
    ------
    Exception
        When one constraint carries more than one distinct range token.
    """
    result_constrains = []
    
    # One raw compiled pattern instead of the original's two non-raw
    # patterns (which triggered invalid-escape warnings and scanned
    # every constraint twice).
    range_pattern = re.compile(r'<(\d+)~(\d+)>')
    
    for con in constrains:
        
        num_range = list(set(range_pattern.findall(con)))
        if len(num_range) == 0:
            result_constrains.append(con)
            continue
        elif len(num_range) > 1:
            raise Exception('检查到存在多个range模式')
        
        num_start,num_end = num_range[0]
        # Re-build the literal token from the captured digits so the
        # replacement matches the original text exactly.
        token = f'<{num_start}~{num_end}>'
        for i in range(int(num_start),int(num_end)+1):
            result_constrains.append(con.replace(token,str(i)))
    
    return result_constrains

+ 256 - 0
_opt/algorithm/main.py

@@ -0,0 +1,256 @@
+from typing import Union
+
+import numpy as np
+import pandas as pd
+
+from .opt_obj import Opt
+from .opt_alg import main as opt_alg_main
+from .constrains.build import main as build_all_constrains
+from ._utils.config_info import ConfigInfo
+from .model.diagnosis import ModelDiag
+
def main(*args, system_model=None, config=None):
    """Entry point of the optimisation-algorithm component.

    Groups the positional inputs by their configured role, builds the
    boundary definitions and feasibility constraints, wraps everything
    into an :class:`Opt` problem, optionally diagnoses the system
    model, runs the optimisation algorithm and returns its outputs.

    Parameters
    ----------
    *args
        Positional input data, matched to IO groups via *config*.
    system_model
        Pre-built system model; built from *config* when None.
    config : dict
        Component configuration.

    Returns
    -------
    list
        Result dataframes from the algorithm, optionally extended with
        the initial system outputs (output group '初始输出').
    """
    config_info = ConfigInfo(config=config)
    
    group_data  = config_info.get_io_group_info(io='in',type='data',data=args)
    group_point = config_info.get_io_group_info(io='in',type='point_id')
    check_var_data(group_data)   # validate the shape of every data input
    
    # Component input 1: model variables
    group_opt_var = group_data.get('优化变量')
    group_sys_var = group_data.get('系统变量')
    group_oth_var = group_data.get('其他变量')
    
    group_opt_col = group_point.get('优化变量')
    group_sys_col = group_point.get('系统变量')
    group_oth_col = group_point.get('其他变量')
    
    # Component input 2: external variables
    group_cst_var = group_data.get('外部变量')
    group_cst_col = group_point.get('外部变量')
    
    # Component input 3: dynamic configuration
    group_dyn_cfg = group_data.get('动态配置',[None])
    allow_neg_opt = group_dyn_cfg[0]  # whether negative optimisation must be checked
    
    # Initialise the boundary conditions
    boundary,boundary_insert = get_boundary(group_opt_var)
    lb,ub,var_type,var_precis,collection_map,var_map = init_boundary(boundary)
    
    opt_var = get_opt_var(group_opt_var,group_opt_col)
    sys_var = get_sys_var(group_sys_var,group_sys_col)
    oth_var = get_oth_var(group_oth_var,group_oth_col,index=opt_var.index)
    cst_var = get_cst_var(group_cst_var,group_cst_col,index=opt_var.index)
    
    # Initialise feasibility constraints; dynamic ones need parsing up front
    constrains  = build_all_constrains(config)
    
    # System model (lazy import keeps the runtime dependency optional)
    if system_model is None:
        from .model.model import main as get_system_model
        system_model = get_system_model(config)
    
    # Optimisation problem object
    opt_object = Opt(
        lb              = lb,
        ub              = ub,
        varTypes        = var_type,
        var_precis      = var_precis,
        maxormins       = 1 if config['dir_min'] is True else -1,
        collection_map  = collection_map,
        var_map         = var_map,
        boundary_insert = boundary_insert,
        opt_var         = opt_var,
        sys_var         = sys_var,
        system_model    = system_model,
        oth_var         = oth_var,
        cst_var         = cst_var,
        opt_target      = config['target'],
        constrains      = constrains
    )
    
    # System-model diagnosis (derivative report), enabled by default
    diag = config.get('diag_model',True)
    if diag == True:
        model_diag = ModelDiag(
            model = system_model,
            data  = opt_object.current_system_x,
            c_var = opt_object.name_opt_vars_c,
            d_var = 1,
        )
        model_diag.summary_diag()
    
    # Optimisation algorithm.
    # Negative optimisation is tolerated when either the component config
    # or the dynamic input allows it.
    allow_neg_opt_config = config_info.get_property('allow_neg_opt',default=False)
    allow_neg_opt        = any([allow_neg_opt,allow_neg_opt_config])
    result_list          = opt_alg_main(opt_obj = opt_object, allow_neg_opt = allow_neg_opt, config =config)
    
    # Append the system model's initial outputs (matching its initial inputs)
    output_groups = config_info.get_io_group_info(io='out',type='point_id')
    result_list   = result_append_init_sys_y(result_list,output_groups,opt_object)
    
    return result_list
+
+
def check_var_data(group_data) -> None:
    """Validate that every DataFrame input carries exactly one row.

    Downstream assembly (e.g. ``get_cst_var``) assumes single-row
    inputs, so anything else is rejected early with a readable error.
    Non-DataFrame entries are skipped.

    Raises
    ------
    Exception
        When a DataFrame input has a row count other than 1.
    """
    for name,data in group_data.items():
        if not isinstance(data,pd.DataFrame):
            continue
        # BUG FIX: the original raised when shape[0] == 1, i.e. it
        # rejected exactly the single-row inputs that its own error
        # message (and the rest of the pipeline) require.
        if data.shape[0] != 1:
            raise Exception(f'{name}的数据行数不等于1,不满足要求,数据为{data}')
+
def get_opt_var(group_opt_var:list,group_opt_col:list) -> pd.DataFrame:
    """Collect the current values of the optimisation variables.

    Each boundary-info dict contributes its 'cur_val' frame; the
    concatenated result is relabelled with *group_opt_col*.
    """
    current_values = [bound_info['cur_val'] for bound_info in group_opt_var]
    opt_var = pd.concat(current_values, axis=1)
    opt_var.columns = group_opt_col
    return opt_var
+
def get_sys_var(group_sys_var:list,group_sys_col:list) -> Union[pd.DataFrame,None]:
    """Assemble the system variables into one frame.

    Keeps the first column of each input frame and relabels the result
    with *group_sys_col*. Returns None (with a warning) when no system
    variables were provided.
    """
    if group_sys_var is None or len(group_sys_var)==0:
        print('【警告】未获取到"系统变量",确保实际不存在该变量 ')
        return None

    first_columns   = [df.iloc[:,[0]] for df in group_sys_var]
    sys_var         = pd.concat(first_columns, axis=1)
    sys_var.columns = group_sys_col
    return sys_var
+
def get_oth_var(group_oth_var:list,group_oth_col:list,index) -> Union[pd.DataFrame,None]:
    """Assemble the 'other variables' into one frame.

    - Group absent/empty             -> None
    - All entries are DataFrames     -> concat of their first columns
    - Any entry is not a DataFrame   -> zero-filled placeholder frame

    Parameters
    ----------
    index
        Index to give the zero-filled placeholder frame.
    """
    # BUG FIX: the original None-checked group_oth_col but called
    # len() on group_oth_var, so a None data group with non-None
    # columns crashed with TypeError instead of returning None.
    oth_var_exist = (
        group_oth_var is not None
        and group_oth_col is not None
        and len(group_oth_var) > 0
    )
    
    if not oth_var_exist:
        # print('【警告】未获取到"其他变量",确保实际不存在该变量 ')
        return None
    
    is_oth_var_df = all([isinstance(df,pd.DataFrame) for df in group_oth_var])
    
    if is_oth_var_df:
        first_columns   = [df.iloc[:,[0]] for df in group_oth_var]
        oth_var         = pd.concat(first_columns,axis=1)
        oth_var.columns = group_oth_col
    else:
        # Invalid entries: substitute zeros for the whole group
        print('【警告】未获取到有效的"其他变量",用0值代替所有该类变量 ')
        oth_var = pd.DataFrame(
            data    = np.zeros_like(group_oth_col,dtype='float').reshape(1,-1),
            columns = group_oth_col,
            index   = index
        )
    
    return oth_var
+
def get_cst_var(group_cst_var:list,group_cst_col:list,index) -> Union[pd.DataFrame,None]:
    """Assemble the external (constraint) variables into one frame.

    Each entry must be a single-row DataFrame; its first column is kept
    and re-indexed to *index* so all groups align. Returns None when
    the group (data or columns) is absent or empty.

    Raises
    ------
    Exception
        When an entry is not a DataFrame or is not single-row.
    """
    if group_cst_var is None or len(group_cst_var) == 0:
        # print('【警告】未获取到"外部变量",确保实际不存在该变量 ')
        return None
    if group_cst_col is None or len(group_cst_col) == 0:
        return None
    
    new_group_cst_var = []
    
    # Validate types, then align the index and keep the first column
    for var in group_cst_var:
        if not isinstance(var,pd.DataFrame):
            raise Exception(f'外部变量中存在错误的数据类型{type(var)}({var})')
        if var.shape[0] != 1:
            # BUG FIX: the original message ended in a literal '(var)';
            # the offending data is now actually interpolated.
            raise Exception(f'外部变量的数据形状有误{var.shape}({var})')
        # Copy before re-indexing so the caller's frame is not mutated
        first_col       = var.iloc[:,[0]].copy()
        first_col.index = index
        new_group_cst_var.append(first_col)
    
    cst_var         = pd.concat(new_group_cst_var,axis=1)
    cst_var.columns = group_cst_col
        
    return cst_var
+
def get_boundary(group_opt_var:list) -> tuple:
    """Deduplicate boundary infos by id and record group insert counts.

    Several optimisation variables may share one boundary id (they are
    synchronised as a group); only the first occurrence of each id is
    kept, and *boundary_insert* records how many grouped duplicates
    must be re-inserted at each position later on.

    Returns
    -------
    tuple
        ``(boundary, boundary_insert)`` — note: the annotation was
        corrected from ``dict``; two values are returned.
    """
    boundary        = {}  # {'id1':boundary_info1 , 'id2':boundary_info2}
    boundary_insert = {}  # {0:0, 1:2, 3:1} -> insert 0 entries at pos 0, 2 at pos 1, 1 at pos 3
    
    print('-'*20+'边界条件'+'-'*20)
    
    pos_group_id = -1
    for idx,bound_info in enumerate(group_opt_var):
        
        print('-'*20+ str(idx) +'-'*20)
        for k,v in bound_info.items():
            # Single-cell frames are printed as their scalar value
            v = v.iat[0,0] if isinstance(v,pd.DataFrame) else v
            print(f'{k} : {v} \n')
        
        if bound_info['id'] not in boundary.keys():
            pos_group_id += 1
            # When several optimisation variables share one id (one group),
            # the whole group's boundary info is taken from its id variable
            boundary[bound_info['id']]    = bound_info  # keep a single entry per unique id
            boundary_insert[pos_group_id] = 0           # position -> number of inserts
        
        else:
            boundary_insert[pos_group_id] += 1
    
    return boundary,boundary_insert
+
def init_boundary(all_boundary) -> tuple:
    """Translate boundary definitions into algorithm search-space arrays.

    Supported boundary types:
      - 'interval'   : range [lb, ub] with explicit type/precision
      - 'collection' : finite value set, encoded as an index 0..n-1
      - 'map'        : like 'collection', plus a mapping onto other
                       system variables

    Returns (lb, ub, var_type, var_precis, collection_map, var_map).

    Raises
    ------
    ValueError
        On an unknown boundary type.
    """
    lb             = []  # lower bounds, every boundary type
    ub             = []  # upper bounds, every boundary type
    var_type       = []  # 0 = continuous, 1 = discrete
    var_precis     = []  # precision, interval boundaries only
    collection_map = {}  # collection/map boundaries {'u1_name':{0:u1_a, 1:u1_b}}
    var_map        = {}  # map boundaries            {'u1_name':{0:x1_a, 1:x1_b}}
    
    for boundary_id,boundary_info in all_boundary.items():
        boundary_type = boundary_info.get('boundary_type')
        var_name      = boundary_info.get('var_name')
        
        if boundary_type == 'interval':
            lb.append(boundary_info.get('lb'))
            ub.append(boundary_info.get('ub'))
            var_type.append(boundary_info.get('var_type'))
            var_precis.append(boundary_info.get('var_precis'))
        
        elif boundary_type in ('collection','map'):
            # Collection values are encoded as their position index
            # (values expected to be sorted ascending).
            collection = boundary_info.get('collection')
            lb.append(0)
            ub.append(len(collection)-1)
            var_type.append(1)
            var_precis.append(0)
            collection_map[var_name] = dict(enumerate(collection))
            if boundary_type == 'map':
                var_map[var_name] = boundary_info.get('var_map')
        
        else:
            raise ValueError('边界类型有误')
        
    return lb,ub,var_type,var_precis,collection_map,var_map
+
def result_append_init_sys_y(result:list,output_groups:dict,opt_obj:Opt):
    """Append the initial system outputs (output group '初始输出') to *result*.

    Returns *result* unchanged when that output group is absent or empty.
    """
    if '初始输出' not in output_groups:
        return result
    init_cols = output_groups['初始输出']
    if len(init_cols) == 0:
        return result
    
    # TODO: raise a readable error when a requested column is missing
    init_sys_y = [opt_obj.current_system_y.loc[:,[y_col]] for y_col in init_cols]
    # TODO: appending at the end may not be ideal; position should be configurable
    
    return result + init_sys_y

+ 50 - 0
_opt/algorithm/model/diagnosis.py

@@ -0,0 +1,50 @@
+import pandas as pd
+
+from .model import SystemModel
+
class ModelDiag:
    """Diagnostics for a system model around its initial operating point.

    Currently reports first/second order numerical derivatives of the
    model outputs with respect to the continuous optimisation
    variables; discrete-variable diagnosis is not implemented yet.
    """
    def __init__(
        self,
        model: SystemModel,
        data : pd.DataFrame,
        c_var: list,
        d_var: list,
    ) -> None:
        self.model = model 
        # Only the first row (the initial operating point) is diagnosed
        self.data = data.iloc[[0],:]
        self.c_var = c_var  # names of the continuous optimisation variables
        # NOTE(review): callers pass d_var=1 despite the list annotation;
        # unused until diag_d_var_change is implemented — confirm intent.
        self.d_var = d_var
    
    def summary_diag(self):
        """Print the full diagnosis report to stdout."""
        print('-'*20+'系统模型诊断'+'-'*20)
        with pd.option_context('display.max_rows', None,'display.max_columns', None,'display.width',500):
            derivate_c_info = self.diag_c_var_derivate()
            print('-'*20+'系统连续变量偏导'+'-'*20)
            print(derivate_c_info)
    
    def diag_c_var_derivate(self) -> pd.DataFrame:
        """First (D1) and second (D2) order partial derivatives of every
        model output w.r.t. every continuous variable, joined side by side
        and rounded to 4 decimals."""
            
        derivate_1 = self.model.predict_derivate_1(data=self.data,vars=self.c_var)
        derivate_2 = self.model.predict_derivate_2(data=self.data,vars=self.c_var)
        
        derivate_1 = process_derivate(derivate_1,'D1')
        derivate_2 = process_derivate(derivate_2,'D2')
        derivate   = derivate_1.join(derivate_2).round(4)
        
        return derivate
        
    
    def diag_d_var_change(self) -> pd.DataFrame:
        # Placeholder: change diagnosis for discrete variables, not implemented
        ...
+        
+
def process_derivate(df:pd.DataFrame,D_name):
    """Reshape a wide derivative frame into long form.

    The incoming index (named '__VAR__SAMPLE') holds tokens like
    '__x__0'; the variable name between the double underscores is
    extracted, the frame is melted, and the result is indexed by
    (SYS_OUTPUT, OPT_VAR) with a single value column *D_name*.
    """
    flat = df.reset_index()
    flat['__VAR__SAMPLE'] = flat['__VAR__SAMPLE'].str.extract(r'__(.+)__', expand=False)
    long_form = flat.melt(id_vars='__VAR__SAMPLE', var_name='SYS_OUTPUT', value_name=D_name)
    long_form = long_form.rename(columns={'__VAR__SAMPLE': 'OPT_VAR'})
    return long_form.set_index(['SYS_OUTPUT', 'OPT_VAR'])

+ 145 - 0
_opt/algorithm/model/model.py

@@ -0,0 +1,145 @@
+import time
+
+import numpy as np
+import pandas as pd
+
+from .._utils.data_summary import summary_dataframe
+
+try:
+    from executor import exec_workflow
+    from workflowlib.utils import queryset
+except:
+    pass
+
def main(config=None):
    """Build a :class:`SystemModel` from the component configuration.

    'get_mod_io' defaults to True (resolve the model's port ids).
    """
    model = SystemModel(
        model_name  = config['model_name'],
        model_code  = config['model_code'],
        input_code  = config['input_code'],
        output_code = config['output_code'],
        get_mod_io  = config.get('get_mod_io',True),
    )
    return model
+
+
class SystemModel:
    """Wrapper around a workflow-hosted system model.

    ``predict`` casts the inputs to float64 and forwards them through
    the external workflow executor; ``predict_derivate_1/2``
    approximate first/second order output derivatives by forward
    finite differences.

    NOTE(review): ``exec_workflow`` and ``queryset`` come from the
    optional try-import at module top; outside that runtime this class
    can only be built with get_mod_io=False and predict() will fail.
    """
    
    def __init__(
        self, 
        model_name  : str  = None,
        model_code  : str  = None,
        input_code  : str  = None,
        output_code : str  = None,
        get_mod_io  : bool = None
    ) -> None:
        
        self.MODEL_NAME  = model_name
        self.MODEL_CODE  = model_code
        self.INPUT_CODE  = input_code
        self.OUTPUT_CODE = output_code
        self.GET_MOD_IO  = get_mod_io
        self.PAST_TIME   = []  # wall-clock seconds of every predict() call
        
        # Resolve the input/output port names of the system-model
        # component on the canvas
        if self.GET_MOD_IO == True:
            
            # point_id of the system model's inputs and outputs
            node = queryset.nodes().filter(code=self.MODEL_CODE).first()
            try:
                self.MODEL_INPUT_ID  = [i.get('point_id') for i in node.ports_in]
                self.MODEL_OUTPUT_ID = [i.get('point_id') for i in node.ports_out]
            except:
                raise Exception('未获取到系统模型的输入输出ID, 需检查优化算法组件的配置是否正确')
        else:
            self.MODEL_INPUT_ID  = None
            self.MODEL_OUTPUT_ID = None
    
    def predict(self, data:pd.DataFrame) -> pd.DataFrame:
        """Run the system model on *data* and return its outputs.

        Each column is cast to float64 first; any failure prints a
        summary of the offending input before re-raising. The call
        duration is recorded in ``PAST_TIME``.
        """
        all_data = []
        for i in range(data.shape[1]):
            try:
                all_data.append(data.iloc[:,[i]].astype('float64'))
            except Exception as E:
                data_i = data.iloc[:,[i]]
                print(f'{data_i}数据有误,不可转换为float')
                summary_dataframe(data,df_name='系统模型输入排查')
                raise Exception(E)
        
        time_start  = time.time()
        
        try:
            output_data = exec_workflow(self.MODEL_NAME, self.INPUT_CODE, self.OUTPUT_CODE, *all_data)
        except Exception as E:
            summary_dataframe(data,df_name='系统模型输入排查')
            raise Exception(E)
        
        time_end    = time.time()
        past_time   = round(time_end-time_start,2)
        self.PAST_TIME.append(past_time)
        
        print(f'第{len(self.PAST_TIME)}次调用系统模型,本次调用时长为:{past_time}秒')        
        # Normalise list-of-frames vs single-frame returns into one frame
        output_data = output_data if isinstance(output_data,list) else [output_data]
        output_data = pd.concat(output_data,axis=1)
        return output_data
    
    def summary_past_time(self):
        """Print summary statistics of the recorded call durations.

        Best-effort: any failure (e.g. no calls recorded yet) is
        swallowed silently.
        """
        try:
            print(
                f'SUM :{np.sum(self.PAST_TIME).round(2)} \n'
                f'Mean:{np.mean(self.PAST_TIME).round(2)} \n'
                f'SD  :{np.std(self.PAST_TIME).round(2)} \n'
                f'MIN :{np.min(self.PAST_TIME)} \n'
                f'MAX :{np.max(self.PAST_TIME)} \n'
                f'Q25 :{np.quantile(self.PAST_TIME,0.25)} \n'
                f'Q75 :{np.quantile(self.PAST_TIME,0.75)} \n'
            )
        except:
            pass
    
    def predict_derivate_1(self,data:pd.DataFrame,vars:list,eps=1e-4) -> pd.DataFrame:
        """Forward-difference first derivative of every model output
        with respect to each variable in *vars*.

        The data is tiled into (n_var + 1) stacked copies — one 'raw'
        plus one with each variable perturbed by *eps* — so the model
        is called only once. Rows are identified by an index of the
        form '__{var}__{sample}'.
        """
        
        if pd.Index(data=vars).has_duplicates is True:
            raise Exception(f'{vars}中存在重复值')
        
        vars  = list(vars)
        n_var = len(vars)
        n_row = data.shape[0]
        
        # Build the '__{var}__{sample}' index for the stacked copies
        sample_index = pd.Index(np.tile(np.arange(n_row),n_var+1),dtype='str')
        var_index    = pd.Index(np.repeat(['raw']+vars,repeats=n_row),dtype='str')
        index        = '__'+var_index+'__'+sample_index
        index.name   = '__VAR__SAMPLE'
        
        data  = pd.DataFrame(
            data    = np.tile(data.values,[n_var+1,1]),
            columns = data.columns,
            index   = index
        )
        
        # Perturb each variable by eps within its own copy of the data
        for var in vars:
            if var not in data.columns:
                raise Exception(f'数据中不包含给定的列{var}')
            data.loc[lambda dt:dt.index.str.contains(f'__{var}__'),var] += eps
        
        pred = self.predict(data=data)
        pred.index = data.index 
        
        # (f(x+eps) - f(x)) / eps, per variable
        all_derivate_1 = []
        for var in vars:
            f_dx       = pred.loc[lambda dt:dt.index.str.contains(f'__{var}__'),:]
            f_x        = pred.loc[lambda dt:dt.index.str.contains(f'__raw__'),:]
            derivate_1 = pd.DataFrame((f_dx.values-f_x.values)/eps,index=f_dx.index,columns=pred.columns)
            all_derivate_1.append(derivate_1)
        all_derivate_1 = pd.concat(all_derivate_1,axis=0)
            
        return all_derivate_1

    def predict_derivate_2(self,data:pd.DataFrame,vars:list,eps=1e-4) -> pd.DataFrame:
        """Forward-difference second derivative: the difference of the
        first derivatives evaluated at x and at x + eps, divided by eps.
        (Annotation corrected: *vars* is a list of names, as in
        predict_derivate_1.)"""
        
        if pd.Index(data=vars).has_duplicates is True:
            raise Exception(f'{vars}中存在重复值')
        
        f_x            = self.predict_derivate_1(data=data,vars=vars,eps=eps)
        f_dx           = self.predict_derivate_1(data=data+eps,vars=vars,eps=eps)
        all_derivate_2 = (f_dx-f_x)/eps
        
        return all_derivate_2

+ 234 - 0
_opt/algorithm/opt_alg.py

@@ -0,0 +1,234 @@
+import numpy as np
+import pandas as pd 
+import geatpy as ea 
+
+from ._utils.config_info import ConfigInfo
+from .opt_obj import Opt
+
# Registry of selectable geatpy single-objective DE algorithm templates
# and their default hyper-parameters
# (F: differential weight, Cr: crossover rate, L: neighbourhood size).
All_Algorithm = {
    'soea_DE_rand_1_L_templet':{
        'templet': ea.soea_DE_rand_1_L_templet,
        'param'  : {'F':0.6,'Cr':0.85,'L':15}
    },
    'soea_DE_currentToBest_1_L_templet':{
        'templet': ea.soea_DE_currentToBest_1_L_templet,
        'param'  : {'F':0.6,'Cr':0.85,'L':15}
    },
    'soea_DE_best_1_L_templet':{
        'templet': ea.soea_DE_best_1_L_templet,
        'param'  : {'F':0.5,'Cr':0.8,'L':15}
    },
}
+
def main(opt_obj:Opt,allow_neg_opt:bool,config=None) -> list:
    """Run the configured DE algorithm on *opt_obj* and post-process.

    Parameters
    ----------
    opt_obj : Opt
        The optimisation problem (geatpy Problem subclass).
    allow_neg_opt : bool
        When False, a result worse than the starting point raises.
    config : dict
        Component configuration; must provide NIND, MAXGEN and
        'algorithm' (a key of All_Algorithm).

    Returns
    -------
    list
        [algorithm log, before/after optimisation variables,
        before/after system outputs, *per-group optimised variables].

    Raises
    ------
    Exception
        When no feasible solution is found, or on negative optimisation.
    """
    
    config_info = ConfigInfo(config)
    
    # Validate required component configuration
    config_info.check_property_exist({'NIND':'种群数量','MAXGEN':'迭代次数'})
    
    # Population parameters
    Encoding   = 'RI'
    NIND       = config['NIND']
    Field      = ea.crtfld(
        Encoding   = Encoding,
        varTypes   = opt_obj.varTypes,
        ranges     = opt_obj.ranges,
        borders    = opt_obj.borders,
        precisions = opt_obj.var_precis
    )
    population = ea.Population(Encoding,Field,NIND)

    # Algorithm parameters
    templete = All_Algorithm[config['algorithm']]['templet']
    param    = All_Algorithm[config['algorithm']]['param']
    
    Algorithm         = templete(opt_obj,population)
    Algorithm.MAXGEN  = config['MAXGEN']       # complex problems need more generations
    Algorithm.F       = param['F']             # 0.5~0.8; larger F widens global search but may slow convergence
    Algorithm.Cr      = param['Cr']            # 0.8~1.0; high Cr suits strongly correlated variables, low Cr adds diversity
    Algorithm.L       = param['L']             # 5~20; stronger variable correlation -> larger L (10%~20% of dims for high-dim problems)
    Algorithm.logTras = int(config['MAXGEN']/10)
    Algorithm.verbose = False
    Algorithm.drawing = 0

    [bestindi,pop] = Algorithm.run()
    
    # Check that a feasible solution exists
    # TODO: optionally fall back to the current operating point when infeasible
    if len(bestindi) == 0:
        print_cv(pop.CV,opt_obj.constrains,opt_obj.cur_sys_constrains_cfl)
        raise Exception('没有可行解')
    
    opt_system_x            = get_opt_system_x(opt_obj,bestindi)                # full system-model input
    opt_var                 = opt_system_x.loc[:,opt_obj.name_opt_vars]         # optimisation variables
    sys_before_after_output = get_sys_output_before_after(opt_obj,opt_system_x) # system outputs before/after
    opt_var_before_after    = get_opt_var_before_after(opt_obj,opt_var)         # optimisation variables before/after
    
    # Print the optimisation result comparison
    alg_log = print_opt_result(Algorithm,bestindi,opt_obj,sys_before_after_output,opt_var_before_after)
    
    # Reject negative optimisation unless explicitly allowed
    check_neg_opt(opt_obj,sys_before_after_output,allow_neg_opt)
    
    # Assemble and split the component outputs
    process_list = [alg_log,opt_var_before_after,sys_before_after_output]
    result_list  = process_list + config_info.split_df(method='id',df=opt_var,by_group='优化变量')
    
    return result_list
    
+    
+
def print_opt_result(
    Algorithm,
    bestindi,
    opt_obj                : Opt,
    sys_before_after_output: pd.DataFrame,
    opt_var_before_after   : pd.DataFrame
) -> pd.DataFrame:
    """Print a human-readable optimisation report and return the
    algorithm's iteration log as a (rounded) DataFrame."""
    
    print('+'*20+' 优化结果 '+'+'*20)
    
    print('\n算法迭代过程:')
    with pd.option_context('display.max_rows', None):
        with pd.option_context('display.max_columns', None):
            alg_log = pd.DataFrame(Algorithm.log).round(2)
            print(alg_log)
    
        print(f'\n评价次数 : {Algorithm.evalsNum}')
        print(f'所用时间 : {round(Algorithm.passTime,2)}秒')
        print(f'最优的目标函数值({opt_obj.opt_target}) : {round(bestindi.ObjV[0][0],4)}')
    
        # print('\n最后一次迭代的系统模型输出:')
        # print(opt_obj.data_y.round(4).drop_duplicates().T)
        
        print('\n优化前后控制变量的对比:')
        print(opt_var_before_after)
    
        print('\n优化前后系统模型输出的结果对比:')
        # The optimised output is not unique; the one closest to the target is shown
        print(sys_before_after_output)
        
        print('\n系统模型的初始输入违背约束条件的情况:')
        print_sys_init_constrains(opt_obj.cur_sys_constrains_cfl)
    
    print(' \n系统模型调用时长统计:')
    opt_obj.system_model.summary_past_time()
    
    print('\n'+'+'*50)
    
    return alg_log
+
def print_cv(
    cv                    : np.ndarray,
    constrains            : list,
    cur_sys_constrains_cfl: dict
) -> None:
    """Print constraint-violation statistics of the last generation.

    By convention CV > 0 means the corresponding constraint is
    violated; the table also reports how many individuals satisfied
    each constraint (CV < 0).
    """
    
    # TODO: also report CV for every iteration, not only the last one
    
    # CV of the last generation
    print('每一个约束条件最后一次迭代的CV(大于0表明不满足约束条件):')
    last_cv = [
        cv.mean(axis=0),
        cv.max(axis=0),
        cv.min(axis=0),
        (cv<0).sum(axis=0),
        (cv<0).mean(axis=0),
    ]
    last_cv = pd.DataFrame(
        data=np.vstack(last_cv).T,
        columns=['Mean_CV','Max_CV','Min_CV','Count_CV<0','Pct_CV<0']
    )
    print(last_cv)
    
    # TODO: cross-check CV jointly across the individual constraints
    
    print('\n对应以下约束条件:')
    for idx,cst in enumerate(constrains):
        print(f'{idx} : {cst}<0')
    
    print('\n系统模型的初始输入违背约束条件的情况:')
    print_sys_init_constrains(cur_sys_constrains_cfl)
+
def get_opt_system_x(opt_obj:Opt,bestindi) -> pd.DataFrame:
    """Build the complete system-model input from the best individual.

    Re-inserts the grouped (synchronised) variables, attaches the
    system and other variables, and restores mapped-boundary values.
    """
    phen_full = opt_obj.insert_sync_var(bestindi.Phen.reshape(1,-1))
    optimised = pd.DataFrame(
        data    = phen_full,
        columns = opt_obj.name_opt_vars,
        index   = opt_obj.index
    )
    system_x = pd.concat([optimised, opt_obj.sys_var, opt_obj.oth_var], axis=1)
    # Mapped boundaries are encoded as indices; restore the real values
    return opt_obj.restore_data(system_x)
+
def get_sys_output_before_after(opt_obj:Opt,system_x):
    """Compare the system-model outputs before vs after optimisation.

    Returns a frame indexed by output name with columns Before, After,
    Diff and Diff_pct (percentage change as a string).
    """
    opt_system_y = opt_obj.system_model.predict(system_x)
    
    combined = pd.concat([opt_obj.current_system_y, opt_system_y], axis=0)
    combined = combined.transpose().set_axis(['Before','After'], axis=1)
    diff     = combined['After'] - combined['Before']
    diff_pct = (diff / combined['Before'] * 100).round(2).astype('str') + '%'
    combined = combined.assign(Diff=diff, Diff_pct=diff_pct)
    return combined.round(2)
+
def get_opt_var_before_after(opt_obj,opt_var):
    """Compare the optimisation variables before vs after optimisation.

    Returns a frame indexed by variable name with columns Before,
    After, Diff and Diff_pct (percentage change as a string).
    """
    combined = pd.concat([opt_obj.opt_var, opt_var], axis=0).reset_index(drop=True)
    combined = combined.transpose().set_axis(['Before','After'], axis=1)
    diff     = combined['After'] - combined['Before']
    diff_pct = (diff / combined['Before'] * 100).round(2).astype('str') + '%'
    return combined.assign(Diff=diff, Diff_pct=diff_pct).round(2)
+
+
def check_neg_opt(opt_obj,sys_before_after_output,allow_neg_opt):
    """Raise when the optimisation made the target objective worse.

    *allow_neg_opt* True skips the check entirely; None is treated as
    False; any other non-bool raises.
    """
    allow_neg_opt = False if allow_neg_opt is None else allow_neg_opt
    
    if allow_neg_opt is True:
        return None
    if not isinstance(allow_neg_opt,bool):
        raise Exception(f'判断是否需要校验负优化的输入不是布尔值({type(allow_neg_opt)})')
    
    # Compare the objective before vs after
    raw_pred = sys_before_after_output.at[opt_obj.opt_target,'Before']
    opt_pred = sys_before_after_output.at[opt_obj.opt_target,'After']
    
    direction = opt_obj.maxormins[0]
    worse_when_minimising = (direction == 1) and (opt_pred > raw_pred)
    worse_when_maximising = (direction == -1) and (opt_pred < raw_pred)
    
    if worse_when_minimising or worse_when_maximising:
        raise Exception(f'出现负优化,当前值为{raw_pred},优化后为{opt_pred}')
    
    return None
+
def print_sys_init_constrains(cur_sys_constrains_cfl:dict):
    """Print how the model's initial input violates each constraint group.

    Dict-valued groups print 'name : message' lines; list-valued groups
    print each message on its own line.
    """
    for cst_type,var_message in cur_sys_constrains_cfl.items():
        # Constraint category (boundary / feasibility)
        print(cst_type.upper())
        
        if isinstance(var_message,dict):
            for var_name in var_message:
                print(var_name+' : '+var_message[var_name])
        
        if isinstance(var_message,list):
            for message in var_message:
                print(message)

+ 265 - 0
_opt/algorithm/opt_obj.py

@@ -0,0 +1,265 @@
+import numpy as np
+import pandas as pd
+import geatpy as ea
+
+from .model.model import SystemModel
+from .constrains.parse import parse_constrains
+from .constrains.execute import execute_constrains
+
class Opt(ea.Problem):
    """geatpy optimization problem wrapping a SystemModel.

    The decision variables (``opt_var`` columns) are searched by the
    evolutionary algorithm; together with the fixed system/other variables
    they form the model input, and the model-output column ``opt_target``
    is the objective. Feasibility constraints are parsed once at
    construction and evaluated on every generation in ``aimFunc``.
    """
    def __init__(
        self,
        lb             : list,
        ub             : list,
        varTypes       : list,
        var_precis     : list,
        maxormins      : int,
        collection_map : dict,
        var_map        : dict,
        boundary_insert: dict,
        opt_var        : pd.DataFrame,
        sys_var        : pd.DataFrame,
        system_model   : SystemModel,
        oth_var        : pd.DataFrame,
        cst_var        : pd.DataFrame,
        opt_target     : str,
        constrains     : list=[]
    ):
        # NOTE(review): ``constrains`` has a mutable default ([]). It is only
        # read and reassigned below (never mutated in place), so it is
        # currently harmless, but `constrains=None` + a guard would be safer.
        name      = 'Opt'
        M         = 1
        maxormins = [maxormins] # 1: minimize the target; -1: maximize the target
        Dim       = len(lb)
        lb        = lb
        ub        = ub
        varTypes  = varTypes    # variable type: 0 = continuous, 1 = discrete
        
        ea.Problem.__init__(
            self,
            name      = name,
            M         = M,
            maxormins = maxormins,
            Dim       = Dim,
            varTypes  = varTypes,
            lb        = lb,
            ub        = ub
        )
        
        self.lb = lb 
        # NOTE(review): ``self.up`` looks like a typo for ``self.ub``.
        # Nothing in this file reads ``self.up``; ``self.ub`` (used in
        # get_cur_sys_constrains_cfl) presumably comes from ea.Problem —
        # confirm against the installed geatpy version before renaming.
        self.up = ub
        self.opt_var         = opt_var #DF: current values of the optimization variables (single row)
        self.sys_var         = sys_var #DF: fixed system variables
        self.oth_var         = oth_var #DF: other (pass-through) variables
        self.cst_var         = cst_var #DF: extra variables only used in constraints
        self.name_opt_vars   = opt_var.columns.to_list()
        self.system_model    = system_model
        self.opt_target      = opt_target     # optimization target (a model-output column name)
        self.constrains      = constrains     # raw constraints (reparsed below)
        self.collection_map  = collection_map # collection/mapping-type boundary map, e.g. {'u1':{0:1.5,1:2.3},'u2':{0:1.1,1:3.2}}
        self.var_map         = var_map        # mapping between optimization vars and system vars for mapping-type boundaries, e.g. {'u1':{'x1':{1:0.1,2:0.4},'x2':{2:0.1,3:0.4}}}
        self.var_precis      = var_precis
        self.boundary_insert = boundary_insert
        self.name_opt_vars_c = self.get_name_opt_vars_c(varTypes)  # names of the continuous optimization variables
        
        self.index = opt_var.index # used to label the optimizer component's output rows
        
        self.current_system_x       = self.get_current_system_x()
        # Prediction from the current values; affected by model error, so it
        # does not necessarily reflect the true measured values.
        self.current_system_y       = self.get_current_system_y()
        self.current_system_x_y     = pd.concat([self.current_system_y,self.current_system_x],axis=1)
        # x and y of the system model may share column names; that is only
        # allowed for the "other variables" part of x. For constraint
        # evaluation those x columns are dropped to avoid duplicate labels,
        # so constraint variables must not come from the "other variables".
        oth_var_col                 = self.oth_var.columns if isinstance(oth_var,pd.DataFrame) else []
        self.current_system_x_y_cst = pd.concat([self.current_system_y,self.current_system_x.drop(oth_var_col,axis=1)],axis=1)
        
        self.constrains_var = pd.concat([self.current_system_x_y_cst, self.cst_var],axis=1)
        self.constrains     = parse_constrains(self.constrains,self.constrains_var)
        
        # Check whether the current values already violate the boundary
        # constraints; if so, the result may look like a negative
        # optimization. This is informational only: an out-of-range reading
        # may simply mean the equipment is off, and such a violation is
        # caused by unreasonable boundaries/feasibility constraints rather
        # than by the algorithm itself.
        self.cur_sys_constrains_cfl = self.get_cur_sys_constrains_cfl()

    def get_name_opt_vars_c(self,varTypes):
        """Return the names of the optimization variables that are continuous (type 0)."""
        # Expand varTypes to the full (synced) variable list before masking.
        complete_var_types = np.array(self.insert_sync_var(varTypes,flatten=True))
        complete_var_names = np.array(self.name_opt_vars)
        is_var_continus    = complete_var_types == 0
        name_opt_vars_c    = complete_var_names[is_var_continus]
        return name_opt_vars_c
    
    def check_data_and_system_input(self,data_input:list) -> None:
        """Validate that the optimizer's input ids match the system model's input ids.

        Raises on any set difference or ordering mismatch; a model without a
        declared MODEL_INPUT_ID is skipped.
        """
        # Compare optimizer-component inputs against the system model's declared inputs.
        system_model_input = self.system_model.MODEL_INPUT_ID
        if system_model_input is None:
            return None
        #TODO when input names are not validated, the lengths should still be checked
        
        # Check for missing/extra inputs.
        input_diff = set(data_input).symmetric_difference(system_model_input)
        if len(input_diff) !=0:
            raise Exception(f'系统模型的输入与优化组件的输入不匹配,差异的输入为{input_diff},其中系统模型的输入为{system_model_input},优化组件的输入为{data_input}')
        
        # Check ordering; if it differs, report the system model's order.
        if data_input != system_model_input:
            raise Exception(f'系统模型和优化组件输入的顺序有差异,其中系统模型的输入为{system_model_input},优化组件的输入为{data_input}')
        
    def get_current_system_x(self):
        """Assemble the single-row current model input (opt + sys + other variables)."""
        data_x_current = pd.concat([self.opt_var,self.sys_var,self.oth_var],axis=1)
        self.check_data_and_system_input(data_x_current.columns.to_list())
        
        # Sanity check: the initial input must be exactly one row.
        if data_x_current.shape[0] != 1:
            print('系统模型的初始输入数据')
            print(data_x_current)
            raise Exception('系统模型的初始输入数据的行维度不等于1,检查输入数据')
        
        return data_x_current
    
    def get_current_system_y(self):
        """Predict the model output for the current input and validate the target column."""
        data_y_current = self.system_model.predict(self.current_system_x)
        
        print(f'系统模型的输出变量:{data_y_current.columns.to_list()}')
        
        ## Validate that the system model's output matches expectations.
        
        # The target column must exist in the output.
        if self.opt_target not in data_y_current.columns:
            raise ValueError(f'未在系统模型的输出中找到优化目标{self.opt_target},需检查系统模型优化的输出{data_y_current.columns.to_list()}')
        
        # The target column must be unique (a duplicate label makes .loc return a DataFrame).
        if isinstance(data_y_current.loc[:,self.opt_target],pd.DataFrame):
            raise ValueError(f'在系统模型的输出中找到多个相同名称的优化目标{self.opt_target},需检查系统模型的输出')
   
        return data_y_current
    
    def get_cur_sys_constrains_cfl(self) -> dict:
        """
        Collect the boundary and feasibility constraints that the CURRENT
        values already violate.

        Returns
        -------
        dict with keys 'boundary_int' (interval bounds, {var: message}),
        'boundary_col' (collection/mapping bounds, {var: message}) and
        'constrains' (list of violated constraint definitions).
        """
        conflicts = {
            'boundary_int': {}, # interval boundaries
            'boundary_col': {}, # collection / mapping boundaries
            'constrains'  : []
        }
        
        # Boundary checks, one optimization variable at a time.
        for idx,name in enumerate(self.name_opt_vars):
            cur_value = self.current_system_x.loc[:,[name]].iat[0,0]
            
            # Collection/mapping-bounded variables: must be a member of the set.
            if name in self.collection_map.keys():
                collection = self.collection_map[name].values()
                opt_ub = max(collection)
                opt_lb = min(collection)
                if cur_value not in collection:
                    conflicts['boundary_col'][name] = '优化变量不在集合内,但未超出边界范围'
                
                # Outside the set AND outside its min/max envelope: overwrite
                # the milder message above with the stronger one.
                if (cur_value > opt_ub) or (cur_value < opt_lb):
                    message = f'优化变量不在集合内,且超出边界范围,当前值={cur_value},上边界={opt_ub},下边界={opt_lb}'
                    conflicts['boundary_col'][name] = message
            
            # Interval-bounded variables: compare against the expanded lb/ub.
            else:
                opt_ub = self.insert_sync_var(self.ub,flatten=True)[idx]
                opt_lb = self.insert_sync_var(self.lb,flatten=True)[idx]
                if (cur_value > opt_ub) or (cur_value < opt_lb):
                    message = f'超出边界范围,当前值={round(cur_value,2)},上边界={opt_ub},下边界={opt_lb}'
                    conflicts['boundary_int'][name] = message
        
        # Feasibility check: cv > 0 means the constraint is violated.
        for cst in self.constrains:
            cv = execute_constrains(self.current_system_x_y_cst,cst).flatten()[0]
            if cv > 0:
                conflicts['constrains'].append(cst)
        
        return conflicts
    
    def restore_data(self,data_x,step=['var_map','col_map']):
        """Map encoded optimization variables back to real-world values.

        NOTE(review): ``step`` has a mutable default list; it is only read,
        so this is currently harmless.
        """
        # Restore system variables driven by mapping-type boundaries. This
        # depends on the discrete variables' raw indices, so it must run
        # BEFORE the discrete variables themselves are restored below.
        if (len(self.var_map) > 0) and ('var_map' in step):
            for var_opt_name,var_map_dict in self.var_map.items():
                for var_sys_name,var_sys_map in var_map_dict.items():
                    data_x[var_sys_name] = data_x[var_opt_name].astype(int).map(var_sys_map,na_action='ignore')
        
        # Restore collection/mapping-bounded optimization variables from
        # their integer codes to actual values.
        if (len(self.collection_map) > 0) and ('col_map' in step):
            for var_name,map_dict in self.collection_map.items():
                data_x[var_name] = data_x[var_name].astype('int').map(map_dict,na_action='ignore')
        
        return data_x
    
    def insert_sync_var(self,data:np.ndarray,flatten=False) -> np.ndarray:
        """Duplicate "synced" variables into ``data`` per ``boundary_insert``.

        ``boundary_insert`` maps an original position to the number of copies
        to insert there; ``cum_num`` tracks how far earlier insertions have
        shifted subsequent positions. ``flatten=True`` treats ``data`` as 1-D,
        otherwise columns of a 2-D array are duplicated.
        NOTE(review): assumes ``boundary_insert`` keys iterate in ascending
        position order — confirm where it is built.
        """
        cum_num = 0
        for pos,num in self.boundary_insert.items():
            if num > 0:
                cur_pos = cum_num + pos  
                
                if flatten == False:
                    insert_array = np.repeat(data[:,[cur_pos]],repeats=num,axis=1)
                    data = np.insert(data,[cur_pos],insert_array,axis=1)
                elif flatten == True:
                    insert_array = np.repeat(data[cur_pos],repeats=num)
                    data = np.insert(data,cur_pos,insert_array)
                cum_num += num
                
        return data 
    
    def aimFunc(self, pop):
        """geatpy objective hook: fill ``pop.ObjV`` (and ``pop.CV``) for the population."""
        Vars = pop.Phen
        Vars = self.insert_sync_var(Vars)
        N    = Vars.shape[0]
        
        # Assemble the system model's input: one row per individual.
        data_opt     = pd.DataFrame(data=Vars,columns=self.name_opt_vars)
        data_sys     = data_copy_row(self.sys_var,N_row=N)
        data_oth     = data_copy_row(self.oth_var,N_row=N)
        data_x       = pd.concat([data_opt,data_sys,data_oth],axis=1)
        
        # Open issue:
        # the index passed to the system model may carry time information
        # used in its computation, so ideally it should be forwarded, but
        # repeating an identical timestamp index can break some models
        # (e.g. ones that reindex).
        # data_x.index = self.index.repeat(N) 
        
        # Map encoded variables back to real values per the boundary config.
        data_x = self.restore_data(data_x)
        
        self.data_y = self.system_model.predict(data_x)
        pop.ObjV    = self.data_y.loc[:,[self.opt_target]].to_numpy().reshape(-1,1)

        data = pd.concat([data_x,self.data_y],axis=1)
        
        # Evaluate the feasibility constraints (skip when there are none).
        if self.constrains is None:
            return
        if len(self.constrains)==0:
            return
        cv = []
        for cst in self.constrains:
            cv.append(execute_constrains(data,cst))
        pop.CV = np.hstack(cv)
        
        report_cv_info(cv=pop.CV,constrains=self.constrains)
+
+      
def data_copy_row(data: pd.DataFrame, N_row: int):
    """Broadcast a one-row DataFrame into ``N_row`` identical rows.

    ``None`` passes through unchanged; the result index is a fresh RangeIndex
    and values are promoted to float by the broadcast.
    """
    if data is None:
        return None
    # Multiplying by a ones matrix broadcasts the single row across N_row rows.
    tiled = data.to_numpy() * np.ones([N_row, len(data.columns)])
    return pd.DataFrame(data=tiled, columns=data.columns)
+
def report_cv_info(cv: np.ndarray, constrains: list):
    """Log feasibility statistics of a generation.

    ``cv`` is (population, constraints); entries <= 0 mean the constraint is
    satisfied. Prints the feasible-individual count and flags constraints
    satisfied by fewer than 70% of individuals.
    """
    satisfied     = cv <= 0
    feasible_pop  = satisfied.all(axis=1)
    per_cst_rate  = satisfied.mean(axis=0)
    low_rate_mask = per_cst_rate < 0.7

    print(f'本次迭代可行种群数为{feasible_pop.sum()},占比{(feasible_pop.mean()*100).round(2)}%')

    low_idx = np.arange(cv.shape[1])[low_rate_mask]
    if len(low_idx) == 0:
        return

    low_pct = per_cst_rate[low_rate_mask].round(2)
    print(f'满足以下约束条件的种群数量小于70%, 共{low_rate_mask.sum()}条约束, 可能会影响优化性能')
    for idx, pct in zip(low_idx, low_pct):
        cst = constrains[idx]
        print(f'序号:{idx} \t 约束满足比例:{round(pct*100,2)}% \t 约束:{cst}')

+ 87 - 0
_opt/algorithm/sim_config.py

@@ -0,0 +1,87 @@
+import numpy as np 
+
def simulate_config(
    target          : str,
    dir_min         : bool,
    var_opt         : list,
    var_sys         : list = None,
    var_oth         : list = None,
    var_out         : list = None,
    var_init_op     : list = None,
    constrains      : list = None,   # [constraint1, constraint2, ...]
    constrains_range: list = None,  # [(var_a, ub, lb), (var_b, ub, lb)]
    algorithm       : str = 'soea_DE_currentToBest_1_L_templet',
    NIND            : int  = 300,
    MAXGEN          : int  = 200,
    diag_model      : bool = False,
    allow_neg_opt   : bool = True
):
    """Build the configuration dict for the optimization component.

    Wraps formula constraints, range constraints and the four input
    variable groups (opt / sys / other / external) into the component's
    expected config layout, including the _PORTS_* port lists and their
    group boundaries.
    """
    # Formula constraints -> [{'constrain': ...}, ...]
    constrains = [] if constrains is None else [{'constrain': c} for c in constrains]

    # Range constraints: keep only the bounds that were actually supplied.
    res_constrains_range = []
    if constrains_range is not None:
        for con in constrains_range:
            con_res = {'var_name': con[0]}
            if con[1] is not None:
                con_res['ub'] = con[1]
            if con[2] is not None:
                con_res['lb'] = con[2]
            res_constrains_range.append(con_res)

    # Missing groups default to empty lists.
    var_sys     = [] if var_sys is None else var_sys
    var_oth     = [] if var_oth is None else var_oth
    var_out     = [] if var_out is None else var_out
    var_init_op = [] if var_init_op is None else var_init_op

    var_process   = ['迭代过程','优化变量对比','输出变量对比']
    input_var_id  = var_opt + var_sys + var_oth + var_out
    output_var_id = var_process + var_opt + var_init_op

    # Cumulative group boundaries over the concatenated port lists.
    input_id_end  = np.cumsum([len(v) for v in [var_opt, var_sys, var_oth, var_out]])
    output_id_end = np.cumsum([len(v) for v in [var_process, var_opt, var_init_op]])

    return {
        'algorithm'       : algorithm,
        'NIND'            : NIND,
        'MAXGEN'          : MAXGEN,
        'target'          : target,
        'dir_min'         : dir_min,
        'diag_model'      : diag_model,
        'get_mod_io'      : False,
        'input_code'      : 'input',
        'model_code'      : 'sys',
        'model_name'      : 'open_sys56',
        'output_code'     : 'output',
        'allow_neg_opt'   : allow_neg_opt,
        'constrains'      : constrains,
        'constrains_range': res_constrains_range,
        '_PORTS_IN'       : [{'point_id': p_id} for p_id in input_var_id],
        '_PORTS_OUT'      : [{'point_id': p_id} for p_id in output_var_id],
        '_PORTS_IN_GROUP': [
            {'name': '优化变量', 'start': 0,               'end': input_id_end[0]},
            {'name': '系统变量', 'start': input_id_end[0], 'end': input_id_end[1]},
            {'name': '其他变量', 'start': input_id_end[1], 'end': input_id_end[2]},
            {'name': '外部变量', 'start': input_id_end[2], 'end': input_id_end[3]},
            # {'name': '动态配置', 'start': input_id_end[3], 'end': input_id_end[4]}
        ],
        '_PORTS_OUT_GROUP': [
            {'name': '优化过程', 'start': 0,                'end': output_id_end[0]},
            {'name': '优化变量', 'start': output_id_end[0], 'end': output_id_end[1]},
            {'name': '初始输出', 'start': output_id_end[1], 'end': output_id_end[2]},
        ],
    }

+ 226 - 0
_opt/boundary/_utils/config_info.py

@@ -0,0 +1,226 @@
+import pandas as pd
+
class ConfigInfo:
    """Accessor for a workflow component's config dict.

    Wraps the ``_PORTS_IN`` / ``_PORTS_OUT`` port lists and their
    ``*_GROUP`` partitions, and offers helpers to split/rename DataFrames
    according to that layout.
    """

    def __init__(self, config) -> None:
        self.config   = config
        self.n_input  = len(self.get_io_id('in'))   # number of input ports
        self.n_output = len(self.get_io_id('out'))  # number of output ports

    def get_io_id(self, io='in') -> list:
        """Return the point_id of every input ('in') or output ('out') port."""
        if io == 'in':
            io_key = '_PORTS_IN'
        elif io == 'out':
            io_key = '_PORTS_OUT'
        else:
            raise Exception('WRONG io')

        input_id = [point['point_id'] for point in self.config[io_key]]
        return input_id

    def get_io_group_info(self, io='in', type: str = 'data', data: list = None) -> dict:
        """Map each port-group name to its slice of ``data`` or of the point_ids.

        ``type='data'`` slices the supplied ``data`` list per group;
        ``type='point_id'`` slices the port point_ids instead.
        """
        if io == 'in':
            io_key = '_PORTS_IN_GROUP'
        elif io == 'out':
            io_key = '_PORTS_OUT_GROUP'
        else:
            # Fix: an unknown ``io`` previously fell through to an opaque
            # NameError on ``io_key``; fail explicitly like get_io_id does.
            raise Exception('WRONG io')

        group_info = {}
        for group in self.config[io_key]:
            name      = group['name']
            start_idx = group['start']
            end_idx   = group['end']

            if type == 'data':
                # group_info = {'G1': [DF1, DF2], 'G2': [DF3, DF4]}
                if data is None:
                    raise Exception('当type为data时,必须输入data参数')
                info = data[start_idx:end_idx]

            elif type == 'point_id':
                # group_info = {'G1': [point_1, point_2], 'G2': [point_3, point_4]}
                point_id = self.get_io_id(io=io)
                info     = point_id[start_idx:end_idx]

            else:
                raise Exception('WRONG type')

            group_info[name] = info

        return group_info

    def rename_df(self, dfs: list, io='in') -> list:
        """Rename each single-column DataFrame in ``dfs`` to its port's point_id.

        Non-DataFrame entries (and non-str point_ids) pass through unchanged;
        only the first column of each DataFrame is kept.
        """
        point_id = self.get_io_id(io=io)

        if len(dfs) != len(point_id):
            raise Exception(f'数据长度有误,point_id:{point_id},dfs:{dfs}')

        result = []
        for p, df in zip(point_id, dfs):
            if isinstance(df, pd.DataFrame) and isinstance(p, str):
                result.append(df.iloc[:, [0]].set_axis([p], axis=1))
            else:
                result.append(df)

        return result

    def split_df_by_groupinfo(
        self,
        data_map: dict,
        allow_data_map_is_subset: bool  = False,
        allow_data_map_miss_group: list = None
    ) -> list:
        """
        Flatten ``data_map`` into the ordered list of per-port outputs
        defined by the component's output groups.

        Parameters
        ----------
        data_map : dict
            Group name -> data, e.g. {group1: DataFrame, group2: {portA: float, portB: bool}}
        allow_data_map_is_subset : bool
            When True, groups absent from ``data_map`` are skipped instead
            of raising.
        allow_data_map_miss_group : list
            Specific group names allowed to be absent from ``data_map``.

        Returns
        -------
        list
            One entry per output port, in port order.

        Raises
        ------
        Exception
            Unknown group name, unknown port name, or unsupported payload type.
        """
        output_groupinfo = self.get_io_group_info('out', type='point_id')

        split_data = []

        for group_name, points in output_groupinfo.items():
            data = data_map.get(group_name)

            if data is None:
                if isinstance(allow_data_map_miss_group, list) and group_name in allow_data_map_miss_group:
                    continue
                elif not allow_data_map_is_subset:
                    raise Exception(f'组件输出的分组名称{group_name}有误,分组:{list(data_map.keys())}')
                else:
                    continue

            for p in points:

                if isinstance(data, pd.DataFrame):
                    if p not in data.columns:
                        raise Exception(f'组件输出的桩名称有误,未找到{p},桩:{data.columns.to_list()}')
                    p_data = data.loc[:, [p]]

                elif isinstance(data, dict):
                    if p not in data.keys():
                        raise Exception(f'组件输出的桩名称有误,未找到{p},桩:{list(data.keys())}')
                    p_data = data[p]

                else:
                    # Fix: an unsupported payload type previously appended a
                    # stale ``p_data`` from an earlier iteration (or raised a
                    # confusing NameError on the first one).
                    raise Exception(f'组件输出的分组{group_name}的数据类型有误:{type(data)}')

                split_data.append(p_data)

        return split_data

    def split_df(self, method: str, df: pd.DataFrame, by_group=None) -> list:
        """Split ``df`` into one single-column DataFrame per output port.

        ``method='idx'`` splits positionally (column count must match);
        ``method='id'`` selects columns by port point_id. ``by_group``
        restricts the ports to one output group.
        """
        if by_group is not None:
            output_point_id = self.get_io_group_info(io='out', type='point_id')[by_group]
            output_n = len(output_point_id)
        else:
            output_point_id = self.get_io_id(io='out')
            output_n = self.n_output

        df_list = []

        if method == 'idx':

            if df.shape[1] != output_n:
                raise Exception(f'输出数据的个数不等于原数据中的列数,以下是原数据中包含的列:{df.columns.to_list()}')

            for idx in range(output_n):
                df_list.append(df.iloc[:, [idx]])

        elif method == 'id':
            for point_id in output_point_id:
                if point_id not in df.columns:
                    raise Exception(f'数据中没有{point_id},以下是数据中包含的列:{df.columns.to_list()}')
                df_list.append(df.loc[:, [point_id]])

        else:
            # Fix: an unknown ``method`` previously returned [] silently.
            raise Exception('WRONG method')

        return df_list

    def get_property(self, key: str, default=None):
        """Return config[key], or ``default`` when the key is missing, None or ''."""
        if key not in self.config.keys():
            return default

        value = self.config[key]  # renamed: previous local shadowed builtin `property`
        if value is None or value == '':
            return default

        return value

    def check_property_exist(self, property_name: dict):
        """Raise when any required custom parameter is absent from the config.

        ``property_name`` maps the config key to its human-readable name.
        """
        for param, param_name in property_name.items():
            # Fix: replaced a bare try/except around a dict lookup, which
            # swallowed every error type, with an explicit membership test.
            if param not in self.config:
                raise Exception(f'组件缺少自定义参数:{param_name}')

    def check_io_equal(self):
        """True when the input and output point_id lists are identical (same order)."""
        input_id  = self.get_io_id('in')
        output_id = self.get_io_id('out')
        is_equal  = input_id == output_id
        return is_equal
+    
+    
if __name__ == '__main__':
    # --- demo 1: split grouped output frames back into per-port frames ----
    config = {
        '_PORTS_IN': [],
        '_PORTS_OUT':
            [
                {'cols': [{'type': 'date', 'title': '时间'}, {'type': 'float', 'title': ''}], 'name': '', 'type': 'DF', 'static': True, 'point_id': 'a'},
                {'cols': [{'type': 'date', 'title': '时间'}, {'type': 'float', 'title': 'P_ND2_Tdb'}], 'name': 'ND2_室外温度', 'type': 'DF', 'static': True, 'point_id': 'b'},
                {'cols': [{'type': 'date', 'title': '时间'}, {'type': 'float', 'title': ''}], 'name': '', 'point_id': 'c'},
                {'cols': [{'type': 'date', 'title': '时间'}, {'type': 'float', 'title': ''}], 'name': '', 'point_id': 'd'}
            ],
        '_PORTS_IN_GROUP': [],
        '_PORTS_OUT_GROUP':
            [
                {'id': '1696909849014', 'end': 2, 'name': 'X', 'start': 0, 'static': True},
                {'id': '1696909849434', 'end': 4, 'name': 'Y', 'start': 2, 'static': True}
            ],
        '_CODE'       : None,
        '_DEVICE_CODE': None
    }
    config_info = ConfigInfo(config)

    df1 = pd.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]})
    df2 = pd.DataFrame({'c': [7, 8, 9], 'd': [10, 11, 12]})

    # Fix: the keyword is ``data_map`` — the previous ``df_map=`` raised
    # TypeError: split_df_by_groupinfo() got an unexpected keyword argument.
    res1 = config_info.split_df_by_groupinfo(data_map={'X': df1, 'Y': df2})
    print('res1', res1)

    # --- demo 2: rename incoming frames to their input point_ids ----------
    config = {
        '_PORTS_OUT': [],
        '_PORTS_IN':
            [
                {'point_id': 'a'},
                {'point_id': 'b'},
            ],
        '_PORTS_IN_GROUP': [],
        '_PORTS_OUT_GROUP':
            [
            ],
        '_CODE'       : None,
        '_DEVICE_CODE': None
    }
    df1 = pd.DataFrame({'x': [1, 2, 3]})
    df2 = pd.DataFrame({'y': [1, 2, 3]})
    config_info = ConfigInfo(config)
    res2 = config_info.rename_df([df1, df2])
    print('res2', res2)

+ 166 - 0
_opt/boundary/_utils/data_cleaner.py

@@ -0,0 +1,166 @@
+import numpy as np
+import pandas as pd 
+
+from .data_summary import summary_dataframe
+
class DataCleaner:
    """Chainable row filter for a DataFrame.

    Each ``rm_*`` method computes a boolean drop mask on the working copy and
    ORs it into ``self.drop_index``; ``get_data`` finally applies the
    accumulated mask to the untouched ``raw_data``. All ``rm_*`` methods
    return ``self`` so calls can be chained fluently.
    """
    
    def __init__(self,data:pd.DataFrame,print_process=True) -> None:
        self.raw_data      = data          # original data, never modified
        self.data          = data.copy()   # working copy used by the filters
        self.drop_index    = np.array([False]*len(self.raw_data))  # accumulated drop mask
        self.print_process = print_process
        
        if self.print_process:
            summary_dataframe(df=self.raw_data,df_name='原始数据')
    
    def rm_na_and_inf(self):
        """Mark rows containing any NaN or +/-inf value for removal."""
        is_na_data      = self.data.isna().any(axis=1).values
        is_inf_data     = np.any(np.isinf(self.data.values),axis=1)
        drop_index      = is_na_data | is_inf_data
        self.drop_index = self.drop_index | drop_index
        self._count_removed_data(index=drop_index,method='rm_na_and_inf')
        return self
    
    def rm_constant(
        self,
        window        :int = 10,
        include_cols :list = '__ALL__',
        include_by_re:bool = False,
        exclude_cols :list = None
    ):
        """Mark rows where any selected column is constant over a rolling window.

        A zero rolling standard deviation over ``window`` samples flags a
        stuck/constant signal.
        """
        data              = self._get_data_by_cols(include_cols,include_by_re,exclude_cols)
        drop_index_matrix = (data.rolling(window=window).std()==0)
        drop_index        = drop_index_matrix.any(axis=1)
        self.drop_index   = self.drop_index | drop_index
        self._count_removed_data(index=drop_index,method='rm_constant',index_matrix=drop_index_matrix,var_name=data.columns)
        return self 

    def rm_rolling_fluct(
        self,
        window        :int   = 10,
        unit          :str   = 'min',
        fun           :str   = 'ptp',
        thre          :float = 0,
        include_cols  :list  = '__ALL__',
        include_by_re :bool  = False,
        exclude_cols  :list  = None
    ):
        """Mark rows whose rolling fluctuation exceeds ``thre``.

        ``fun='ptp'`` uses the rolling peak-to-peak range; ``fun='pct'``
        uses that range relative to the rolling minimum. With ``unit`` set,
        the window is time-based (requires a datetime index).
        NOTE(review): any other ``fun`` value leaves ``res`` unbound and
        raises NameError — confirm callers only pass 'ptp'/'pct'.
        """
        data = self._get_data_by_cols(include_cols,include_by_re,exclude_cols)
        
        if unit is None:
            roll_window = window
        else:
            roll_window = str(window) + unit
        roll_data = data.rolling(window=roll_window,min_periods=1,center=True) 
        
        if fun == 'ptp':
            res = roll_data.max() - roll_data.min()
        elif fun == 'pct':
            res = (roll_data.max() - roll_data.min())/roll_data.min()
        drop_index_matrix = res>thre
        drop_index = drop_index_matrix.any(axis=1)
        self.drop_index = self.drop_index | drop_index
        self._count_removed_data(index=drop_index,method='rm_rolling_fluct',index_matrix=drop_index_matrix,var_name=data.columns)
        return self
    
    def rm_outlier_rolling_mean(
        self,
        window       :int    = 10,
        thre         :float  = 0.02,
        include_cols :list   = '__ALL__',
        include_by_re:bool   = False,
        exclude_cols :list   = None
    ):
        """Mark rows deviating from their rolling mean by more than ``thre`` (relative).

        NOTE(review): the deviation is divided by the current value, so rows
        with zeros produce inf and get dropped; the log label is
        'rm_outlier_mean' rather than the method name.
        """
        data            = self._get_data_by_cols(include_cols,include_by_re,exclude_cols)
        data            = data.reset_index(drop=True)
        windows_mean    = data.rolling(window=window,min_periods=1).mean()
        drop_index      = (((data - windows_mean)/data).abs()>thre).any(axis=1).values
        self.drop_index = drop_index | self.drop_index
        self._count_removed_data(index=drop_index,method='rm_outlier_mean')
        return self 
    
    def rm_negative(
        self,
        keep_zero     :bool = False,
        include_cols :list  = '__ALL__',
        include_by_re:bool  = False,
        exclude_cols :list  = None
    ):
        """Mark rows containing negative values (and zeros unless ``keep_zero``)."""
        data = self._get_data_by_cols(include_cols,include_by_re,exclude_cols)
        if keep_zero is True:
            drop_index = (data<0).any(axis=1).values
        else:
            drop_index = (data<=0).any(axis=1).values
        self.drop_index = drop_index | self.drop_index
        self._count_removed_data(index=drop_index,method='rm_negative')
        return self
    
    def rm_rule(self,remove_rule:str):
        """Mark rows matching a ``DataFrame.eval`` boolean expression for removal."""
        data            = self.data.copy()
        drop_index      = np.array(data.eval(remove_rule))
        self.drop_index = drop_index | self.drop_index
        self._count_removed_data(index=drop_index,method=f'rm_rule({remove_rule})')
        return self

    def get_data(self,fill=None) -> pd.DataFrame:
        """Apply the accumulated mask to ``raw_data``.

        With ``fill=None`` the dropped rows are removed; otherwise they are
        kept and their values replaced by ``fill``.
        """
        if fill is None:
            # Keep only the rows that were never marked for removal.
            result_data = self.raw_data.loc[~self.drop_index,:]
        else:
            # Keep the shape; overwrite marked rows with the fill value.
            result_data = self.raw_data.copy()
            result_data.loc[self.drop_index,:] = fill 
        
        if self.print_process: 
            summary_dataframe(result_data,df_name='结果数据')
        return result_data
    
    def _get_data_by_cols(
        self,
        include_cols :list = '__ALL__',
        include_by_re:bool = False,
        exclude_cols :list = None,
    ) -> pd.DataFrame:
        """Select the working-copy columns a filter should operate on.

        ``include_by_re=True`` treats ``include_cols`` as a regex over column
        names; otherwise it is '__ALL__', a single name, or a list of names.
        ``exclude_cols`` is subtracted afterwards.
        NOTE(review): a non-bool ``include_by_re`` leaves ``cols`` unbound
        (NameError) — confirm callers always pass a bool.
        """
        data = self.data.copy()
        
        if include_by_re is True:
            if isinstance(include_cols,str):
                cols = data.loc[:,data.columns.str.contains(include_cols,regex=True)].columns
            else:
                raise Exception('WRONG')
            
        elif include_by_re is False:
            if include_cols == '__ALL__':
                cols = data.columns
            elif isinstance(include_cols,str):
                cols = [include_cols]
            elif isinstance(include_cols,list):
                cols = data.loc[:,include_cols].columns
            else:
                raise Exception('WRONG')
            
        if exclude_cols is not None:
            cols = cols.difference(other=exclude_cols)
        
        return data.loc[:,cols]
        
    
    def _count_removed_data(self,index,method,index_matrix=None,var_name=None):
        """Log how many rows ``method`` marked, optionally broken down per column."""
        count = index.sum()
        pct   = round(count / len(index) * 100,2)
        if self.print_process:
            print(f'remove {count}({pct}%) by {method}')
        
        # Per-column counts are only available when the filter produced a
        # full boolean matrix (rm_constant / rm_rolling_fluct).
        if index_matrix is not None and var_name is not None:
            var_drop_count = np.sum(index_matrix,axis=0)
            for var,drop_count in zip(var_name,var_drop_count):
                if drop_count == 0:
                    continue
                if self.print_process:
                    print(f'{var}:{drop_count}')

+ 671 - 0
_opt/boundary/_utils/data_service.py

@@ -0,0 +1,671 @@
+import json
+import datetime
+import time
+import pandas as pd
+import traceback
+import sys
+import os
+from functools import partial
+from dateutil import tz
+
# Prefer the platform-patched requests shipped with workflowlib; fall back to
# the plain requests library when running outside the workflow runtime.
try:
    from workflowlib import requests
except ImportError:  # was a bare except:, which also hid unrelated errors
    import requests
+
+
# Relative endpoint paths of the data service; PointReader / PointWriter join
# these onto the base urls taken from the environment (DATA_ROOT_URL /
# DATA_UPLOAD_URL).
urlcfg = {
    'getpointdata_url'      : "data/getpointdata",
    'getpointsdata_url'     : "data/getpointsdata",
    'getpointsdataforai_url': "data/getpointsdataforai",
    'getpointsruntime_url'  : "data/getpointsruntime",
    'getcurrdata_url'       : "ai/getcurrdata",
    'gethisdata_url'        : "ai/gethisdata",
    'putaidata_url'         : "ai/putaidata",
    'uploadaifile_url'      : "ai/uploadaifile",
    'addpointdatum_url'     : "ai/addpointdatum",
}
+
+
class PointReader:
    """HTTP client that reads point time-series data from the data service.

    Endpoint paths come from the module-level ``urlcfg``; base urls come
    from the DATA_ROOT_URL / DATA_UPLOAD_URL environment variables unless a
    temporary url is passed to ``__init__``.
    """

    root_url               = os.environ.get('DATA_ROOT_URL')
    upload_url             = os.environ.get('DATA_UPLOAD_URL')
    getpointdata_url       = urlcfg["getpointdata_url"]
    getpointsdata_url      = urlcfg["getpointsdata_url"]
    getpointsdataforai_url = urlcfg["getpointsdataforai_url"]
    getpointsruntime_url   = urlcfg["getpointsruntime_url"]
    getcurrdata_url        = urlcfg["getcurrdata_url"]
    gethisdata_url         = urlcfg["gethisdata_url"]
    putaidata_url          = urlcfg["putaidata_url"]
    uploadaifile_url       = urlcfg["uploadaifile_url"]
    addpointdatum_url      = urlcfg["addpointdatum_url"]

    # Converts a unix timestamp to an aware datetime in Asia/Shanghai.
    dtfromts = partial(datetime.datetime.fromtimestamp, tz=tz.gettz('Asia/Shanghai'))
    # Max connection retries; seconds to sleep after a failed attempt.
    max_try    = 10
    post_sleep = 1
    
    def __init__(self,url=None) -> None:
        # A temporary url passed in overrides the environment-derived default.
        if url is not None:
            self.url = url
            print(f'使用临时url:{self.url}')
        elif self.root_url is None:
            raise Exception('未在环境变量中获取到 DATA_ROOT_URL')
        else:
            self.url = self.root_url + self.getpointsdata_url
    
    # Given a list of point ids, fetch each point's ts/value series.
    def get_points_data(self,point_ids, from_time, to_time, interval=1, type_=3, ts2dt_col=None, return_type='dict'):
        """
        :param point_ids: list
        :param from_time: datetime start of the query window
        :param to_time: datetime end of the query window
        :param interval: int=1 sampling interval
        :param type_: =3 required by the backend service
        :param ts2dt_col: list column names whose timestamps are converted to datetime
        :param return_type: str in {'dict', 'df', 'dfcol'} default='dict' shape of the result
        'dict' returns {point_id: DataFrame} (raw structure)
        'df' adds a point_id column to each point's frame, then concatenates by rows
        'dfcol' indexes each frame by ts and concatenates by columns, using the
        point_id as the name of its 'value' column (only effective when a
        'value' column is present)
        :return: DataFrame (or dict of DataFrames); implicitly None when
        every retry fails
        """
        post_data = {
            "point_ids": point_ids,
            "begin"    : round(from_time.timestamp()),
            "end"      : round(to_time.timestamp()),
            "interval" : interval,
            "type"     : type_,
        }
        rem_try = self.max_try
        while rem_try > 0:
            try:
                resp = requests.post(url=self.url, data=json.dumps(post_data),timeout=60)
                data = resp.json()['data']
                if data:
                    res = dict()
                    for point in data:
                        res[point['point_id']] = pd.DataFrame(point['data'])
                        if ts2dt_col is not None:
                            res[point['point_id']] = self.ts2dt(res[point['point_id']], ts2dt_col)
                        # res[point['point_id']].set_index(['ts'], inplace=True)
                    if return_type == 'dict':
                        return res
                    elif return_type == 'df':
                        for point in res.keys():
                            res[point]['point_id'] = point
                        return pd.concat(res.values(), axis=0)
                    elif return_type == 'dfcol':
                        res_df = pd.DataFrame()
                        for point_id, df_ in res.items():
                            res_df = pd.concat(
                                [res_df, df_.set_index('ts').rename(columns={'value': point_id})],
                                axis=1)
                        return res_df.reset_index()
                else:
                    # Empty payload: treated as a transient failure, retry.
                    rem_try -= 1
                    time.sleep(self.post_sleep)
            except Exception as e:
                self.error_print(sys._getframe().f_code.co_name)
                rem_try -= 1
                time.sleep(self.post_sleep)
        if rem_try == 0:
            # All retries exhausted; falls through and returns None.
            print("\nget_points_data failed")

    # Internal helper: print details of the exception currently being handled.
    def error_print(self,func_name):
        print()
        print(f"{self.dtfromts(time.time())}:")
        print(f"function {func_name} error!")
        print(f"Exception Info:")
        e_type, e_value, e_traceback = sys.exc_info()
        print(e_type)
        print(e_value)
        traceback.print_tb(e_traceback)
        print()

    # Internal helper: convert the given timestamp columns to datetime in place.
    def ts2dt(self,df: pd.DataFrame, cols):
        for col in cols:
            df[col] = pd.Series(map(self.dtfromts, df[col]))
        return df
+
class PointWriter:
    """Uploads point values to the point database via the data service."""

    upload_url        = os.environ.get('DATA_UPLOAD_URL')
    addpointdatum_url = urlcfg.get('addpointdatum_url')
    # Bug fix: error_print used self.dtfromts, but the class never defined it,
    # so every error report crashed with AttributeError. Asia/Shanghai is a
    # fixed UTC+8 zone (no DST), so a stdlib timezone matches PointReader's
    # dateutil-based converter.
    dtfromts = partial(
        datetime.datetime.fromtimestamp,
        tz=datetime.timezone(datetime.timedelta(hours=8)),
    )
    # Max connection retries; seconds to sleep after a failed attempt.
    max_try    = 10
    post_sleep = 1

    def __init__(self, url=None) -> None:
        # A temporary url passed in overrides the environment-derived default.
        if url is not None:
            self.url = url
            print(f'使用临时url:{self.url}')
        elif self.upload_url is None:
            raise Exception('未在环境变量中获取到 DATA_UPLOAD_URL')
        else:
            self.url = self.upload_url + self.addpointdatum_url

    # Upload one value to the point database.
    def ai_add_point_data(self, point_id, ts, value):
        """
        :param point_id: str point id; must already exist in the point DB
        :param ts: datetime business timestamp of the value
        :param value: point value (stringified before upload)
        :return: dict service response (dict['state'] == 0 means success),
                 or None when every retry fails
        """
        post_data = [
            {
                "point_id": str(point_id),
                "data": [{"ts": int(ts.timestamp()), "value": str(value)}],
            }
        ]

        rem_try = self.max_try
        while rem_try > 0:
            try:
                # Bug fix: previously rebuilt the url from the class attrs,
                # silently ignoring a temporary url given to __init__.
                resp = requests.post(
                    url=self.url,
                    headers={'Content-Type': 'application/json; charset=UTF-8'},
                    data=json.dumps(post_data),
                    timeout=60,
                )
                try:
                    payload = resp.json()
                finally:
                    # Close on every path; the success path leaked the
                    # response before.
                    resp.close()
                if payload['state'] == 0:
                    print(f"\nput {point_id} data success!")
                    return payload
                print(f"strange resp: {payload}")
                rem_try -= 1
                time.sleep(self.post_sleep)
            except Exception:
                self.error_print(sys._getframe().f_code.co_name)
                rem_try -= 1
                time.sleep(self.post_sleep)
        print("\nai_add_point_data failed")
        return None

    # Internal helper: print details of the exception currently being handled.
    def error_print(self, func_name):
        print()
        print(f"{self.dtfromts(time.time())}:")
        print(f"function {func_name} error!")
        print(f"Exception Info:")
        e_type, e_value, e_traceback = sys.exc_info()
        print(e_type)
        print(e_value)
        traceback.print_tb(e_traceback)
        print()
+        
+
+       
+# # 内部函数:将datetime转换成timestamp
+# def dt2ts(df: pd.DataFrame, cols):
+#     for col in cols:
+#         df[col] = pd.Series([dt.timestamp() for dt in df[col]])
+#     return df
+
+
+
+
+
+# # 传入一个point id,返回一个dataframe里面包含了ts,point_id,value
+# def get_point_data(point_id, from_time, to_time, interval=1, type_=3, ts2dt_col=None):
+#     """
+#     :param point_id: string
+#     :param from_time: datetime 开始时间
+#     :param to_time: datetime 结束时间
+#     :param interval: int=1 时间间隔
+#     :param type_: =3 后端业务要求
+#     :param ts2dt_col: list timestamp需要转换为datetime的列名
+#     :return: DataFrame 包含时间
+#     """
+#     url = root_url + getpointdata_url
+#     post_data = {
+#         "point_id": point_id,
+#         "begin": round(from_time.timestamp()),
+#         "end": round(to_time.timestamp()),
+#         "interval": interval,
+#         "type": type_,
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             ts_data = resp.json()['data']
+#             if ts_data:
+#                 res = pd.DataFrame(ts_data)
+#                 if ts2dt_col is not None:
+#                     res = ts2dt(res, ts2dt_col)
+#                 # res.set_index(['ts'], inplace=True)
+#                 return res
+#             else:
+#                 rem_try -= 1
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nget_point_data failed")
+
+
+
+# # 传入包含多个point id的list,返回一个dataframe里面包含了ts,多个point_id求总后的value
+# def sum_points_by_ts(point_ids, from_time, to_time, interval=1, type_=3, ts2dt_col=None):
+#     """
+#     :param point_ids: list
+#     :param from_time: datetime 开始时间
+#     :param to_time: datetime 结束时间
+#     :param interval: int=1 时间间隔
+#     :param type_: =3 后端业务要求
+#     :param ts2dt_col: list timestamp需要转换为datetime的列名
+#     :return: DataFrame
+#     """
+#     try:
+#         data_dict = get_points_data(point_ids, from_time, to_time, interval, type_, ts2dt_col)
+#         for point_id in data_dict.keys():
+#             data_dict[point_id].set_index(['ts'], inplace=True)
+#         return sum(_ for _ in data_dict.values()).reset_index()
+#     except Exception as e:
+#         error_print(sys._getframe().f_code.co_name)
+#         time.sleep(post_sleep)
+#         print("\nsum_points_by_ts failed")
+
+
+# # 传入包含多个point id的list,返回所有point_id最新的一条数据
+# def get_points_run_time(point_ids, ts2dt_col=['ts']):
+#     """
+#     :param point_ids: list
+#     :param ts2dt_col: list=['ts'] timestamp需要转换为datetime的列名
+#     :return: DataFrame
+#     """
+#     url = root_url + getpointsruntime_url
+#     post_data = {
+#         "point_ids": point_ids,
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             data = resp.json()['data']
+#             if data:
+#                 res = pd.DataFrame(data)
+#                 if ts2dt_col is not None:
+#                     res = ts2dt(res, ts2dt_col)
+#                 return res
+#             else:
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nget_points_run_time failed")
+
+
+# # 获取最新的AI数据
+# def ai_get_curr_data(
+#         model_id, model_version, algo, algo_version, module_id,
+#         ts2dt_col=None,
+# ):
+#     """
+#     :param model_id: str 模型编号
+#     :param model_version: str 模型版本
+#     :param algo: str 算法名称
+#     :param algo_version: str 算法版本
+#     :param module_id: str 模块编号
+#     :param ts2dt_col: list timestamp需要转换为datetime的列名
+#     :return: datetime, DataFrame 时间戳和数据
+#     """
+#     url = upload_url + getcurrdata_url
+#     post_data = {
+#         "model_id": model_id,
+#         "model_version": model_version,
+#         "algo": algo,
+#         "algo_version": algo_version,
+#         "module_id": module_id,
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             data_resp = resp.json()['data']
+#             if data_resp:
+#                 # res = dict()
+#                 # for row in data_resp:
+#                 #     res[row['ts']] = pd.DataFrame(json.loads(row['data']))
+#                 ts_dt = dtfromts(data_resp[0]['ts'])
+#                 res = pd.DataFrame(json.loads(data_resp[0]['data']))
+#                 if ts2dt_col is not None:
+#                     res = ts2dt(res, ts2dt_col)
+#                 return ts_dt, res
+#             else:
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_get_curr_data failed")
+
+
+# # 获取历史AI数据
+# def ai_get_his_data(
+#         model_id, model_version, algo, algo_version, module_id,
+#         from_time: datetime.datetime,
+#         to_time: datetime.datetime,
+#         ts2dt_col=None,
+#         return_type='df'
+# ):
+#     """
+#     :param model_id: str 模型编号
+#     :param model_version: str 模型版本
+#     :param algo: str 算法名称
+#     :param algo_version: str 算法版本
+#     :param module_id: str 模块编号
+#     :param from_time: datetime 开始时间
+#     :param to_time: datetime 结束时间
+#     :param ts2dt_col: list timestamp需要转换为datetime的列名
+#     :param return_type: str='df' in {'dict', 'df'} 指定返回的数据结构
+#     'dict' 返回 {ts: DataFrame} (原始结构)
+#     'df' 返回各 DataFrame 加入 ts 时间列再按行拼合后的结果
+#     :return: DataFrame里面包含了ts, value
+#     """
+#     url = upload_url + gethisdata_url
+#     post_data = {
+#         "model_id": model_id,
+#         "model_version": model_version,
+#         "algo": algo,
+#         "algo_version": algo_version,
+#         "module_id": module_id,
+#         "begin": round(from_time.timestamp()),
+#         "end": round(to_time.timestamp()),
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             data_resp = resp.json()['data']
+#             if data_resp:
+#                 res = dict()
+#                 for row in data_resp:
+#                     ts = dtfromts(row['ts'])
+#                     res[ts] = pd.DataFrame(json.loads(row['data']))
+#                     if ts2dt_col is not None:
+#                         res[ts] = ts2dt(res[ts], ts2dt_col)
+#                 if return_type == 'dict':
+#                     return res
+#                 elif return_type == 'df':
+#                     for ts in res.keys():
+#                         res[ts]['ts'] = ts
+#                     return pd.concat(res.values(), axis=0)
+#             else:
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_get_his_data failed")
+
+# def ai_get_his_data2(
+#         model_id, model_version, algo, algo_version, module_id,
+#         from_time: datetime.datetime,
+#         to_time: datetime.datetime,
+#         ts2dt_col=None,
+#         return_type='df'
+# ):
+#     """
+#     :param model_id: str 模型编号
+#     :param model_version: str 模型版本
+#     :param algo: str 算法名称
+#     :param algo_version: str 算法版本
+#     :param module_id: str 模块编号
+#     :param from_time: datetime 开始时间
+#     :param to_time: datetime 结束时间
+#     :param ts2dt_col: list timestamp需要转换为datetime的列名
+#     :param return_type: str='df' in {'dict', 'df'} 指定返回的数据结构
+#     'dict' 返回 {ts: DataFrame} (原始结构)
+#     'df' 返回各 DataFrame 加入 ts 时间列再按行拼合后的结果
+#     :return: DataFrame里面包含了ts, value
+#     """
+#     url = root_url + gethisdata_url
+#     post_data = {
+#         "model_id": model_id,
+#         "model_version": model_version,
+#         "algo": algo,
+#         "algo_version": algo_version,
+#         "module_id": module_id,
+#         "begin": round(from_time.timestamp()),
+#         "end": round(to_time.timestamp()),
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             data_resp = resp.json()['data']
+#             if data_resp:
+#                 res = dict()
+#                 for row in data_resp:
+#                     ts = dtfromts(row['ts'])
+#                     res[ts] = pd.DataFrame(json.loads(row['data']))
+#                     if ts2dt_col is not None:
+#                         res[ts] = ts2dt(res[ts], ts2dt_col)
+#                 if return_type == 'dict':
+#                     return res
+#                 elif return_type == 'df':
+#                     for ts in res.keys():
+#                         res[ts]['ts'] = ts
+#                     return pd.concat(res.values(), axis=0)
+#             else:
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_get_his_data failed")
+
+
+# # 将 DataFrame 格式数据转换为 jsonlike 的 list 格式数据
+# def df2jsonlike(df: pd.DataFrame):
+#     res = []
+#     for _, row in df.iterrows():
+#         res.append(row.to_dict())
+#     return res
+
+
+# # 将模型所预测的数据存入数据库
+# # 点位数据
+# def ai_put_ai_data(
+#         model_id, model_version, algo, algo_version, module_id,
+#         ts_dt: datetime.datetime,
+#         data_df: pd.DataFrame,
+#         dt2ts_col=None
+# ):
+#     """
+#     :param model_id: str 模型编号
+#     :param model_version: str 模型版本
+#     :param algo: str 算法名称
+#     :param algo_version: str 算法版本
+#     :param module_id: str 模块编号
+#     :param ts_dt: datetime 业务发生时间,取用时的唯一可用时间戳
+#     :param data_df: DataFrame 业务数据
+#     :param dt2ts_col: list 业务数据中 datetime 列转换为 timestamp
+#     :return: dict: 数据服务返回值,可用 dict['state'] == 0 判断存数据是否成功
+#     """
+#     url = upload_url + putaidata_url
+#     if dt2ts_col is not None:
+#         data_df = dt2ts(data_df, dt2ts_col)
+#     post_data = {
+#         "model_id": model_id,
+#         "model_version": model_version,
+#         "algo": algo,
+#         "algo_version": algo_version,
+#         "module_id": module_id,
+#         "ts": round(ts_dt.timestamp()),
+#         "data": df2jsonlike(data_df),
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             state = resp.json()['state']
+#             if state == 0:
+#                 print(f"\nput {model_id}_{model_version}_{algo}_{algo_version}_{module_id} data success!")
+#                 return resp.json()
+#             else:
+#                 print(f"strange resp: {resp.json()}")
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_put_ai_data failed")
+#         return None
+
+
+# # 将模型所预测的数据存入数据库
+# # 能耗基线数据
+# def ai_put_ai_data2(
+#         model_id, model_version, algo, algo_version, module_id,
+#         ts_dt: datetime.datetime,
+#         data_df: pd.DataFrame,
+#         dt2ts_col=None
+# ):
+#     """
+#     :param model_id: str 模型编号
+#     :param model_version: str 模型版本
+#     :param algo: str 算法名称
+#     :param algo_version: str 算法版本
+#     :param module_id: str 模块编号
+#     :param ts_dt: datetime 业务发生时间,取用时的唯一可用时间戳
+#     :param data_df: DataFrame 业务数据
+#     :param dt2ts_col: list 业务数据中 datetime 列转换为 timestamp
+#     :return: dict: 数据服务返回值,可用 dict['state'] == 0 判断存数据是否成功
+#     """
+#     url = root_url + putaidata_url
+#     if dt2ts_col is not None:
+#         data_df = dt2ts(data_df, dt2ts_col)
+#     post_data = {
+#         "model_id"     : model_id,
+#         "model_version": model_version,
+#         "algo"         : algo,
+#         "algo_version" : algo_version,
+#         "module_id"    : module_id,
+#         "ts"           : round(ts_dt.timestamp()),
+#         "data"         : df2jsonlike(data_df) if isinstance(data_df, pd.DataFrame) else data_df,
+#     }
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url, data=json.dumps(post_data))
+#             state = resp.json()['state']
+#             if state == 0:
+#                 print(f"\nput {model_id}_{model_version}_{algo}_{algo_version}_{module_id} data success!")
+#                 return resp.json()
+#             else:
+#                 print(f"strange resp: {resp.json()}")
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_put_ai_data failed")
+#         return None
+
+
+# # 通过接口上传文件
+# def ai_upload_ai_file(
+#         model_id, algo, algo_version, module_id,
+#         file
+# ):
+#     """
+#     :param model_id: str 模型编号
+#     :param model_version: str 模型版本
+#     :param algo: str 算法名称
+#     :param algo_version: str 算法版本
+#     :param module_id: str 模块编号
+#     :param ts_dt: datetime 业务发生时间,取用时的唯一可用时间戳
+#     :param data_df: DataFrame 业务数据
+#     :param dt2ts_col: list 业务数据中 datetime 列转换为 timestamp
+#     :return: dict: 数据服务返回值,可用 dict['state'] == 0 判断存数据是否成功
+#     """
+#     url = upload_url + uploadaifile_url
+#     post_data = {
+#         "model_id"    : str(model_id),
+#         "algo"        : str(algo),
+#         "algo_version": str(algo_version),
+#         "module_id"   : str(module_id),
+#         "file"        : file
+#     }
+
+#     multipart_encoder = MultipartEncoder(
+#         fields=post_data
+#     )
+
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url,headers={'Content-Type': multipart_encoder.content_type}, data=multipart_encoder)
+#             state = resp.json()['state']
+#             if state == 0:
+#                 print(f"\nput {model_id}_{algo}_{algo_version}_{module_id} data success!")
+#                 return resp.json()
+#             else:
+#                 print(f"strange resp: {resp.json()}")
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#             resp.close()
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_upload_ai_file failed")
+#         return None
+
+
+# # 上传数据至点位数据库
+# def ai_add_point_data(
+#         point_id, ts, value
+# ):
+#     """
+#     :param point_id: str 数据点位,需要预先在点位库中创建好
+#     :param timestamp: str 当前时刻的时间 datetime
+#     :param value: str 点位数值
+#     :return: dict: 数据服务返回值,可用 dict['state'] == 0 判断存数据是否成功
+#     """
+#     url = root_url + addpointdatum_url
+#     post_data = [{"point_id": str(point_id),
+#                   "data":[{"ts": int(ts.timestamp()), 
+#                            "value": str(value)}]}]
+
+#     rem_try = max_try
+#     while rem_try > 0:
+#         try:
+#             resp = requests.post(url=url,headers = {'Content-Type': 'application/json; charset=UTF-8'}, data = json.dumps(post_data))
+#             state = resp.json()['state']
+#             if state == 0:
+#                 print(f"\nput {point_id} data success!")
+#                 return resp.json()
+#             else:
+#                 print(f"strange resp: {resp.json()}")
+#                 rem_try -= 1
+#                 time.sleep(post_sleep)
+#             resp.close()
+#         except Exception as e:
+#             error_print(sys._getframe().f_code.co_name)
+#             rem_try -= 1
+#             time.sleep(post_sleep)
+#     if rem_try == 0:
+#         print("\nai_add_point_data failed")
+#         return None
+
+

+ 11 - 0
_opt/boundary/_utils/data_summary.py

@@ -0,0 +1,11 @@
+import pandas as pd 
+
def summary_dataframe(df: pd.DataFrame, df_name: str):
    """Print an untruncated describe() summary of *df* under a banner."""
    banner = '#' * 20 + f'   Data Summary : {df_name}   ' + '#' * 20
    display_opts = (
        'display.max_rows', None,
        'display.max_columns', None,
        'display.width', 500,
    )
    with pd.option_context(*display_opts):
        print(banner)
        print(df.describe().round(2).T)
+
def print_dataframe(df: pd.DataFrame, df_name: str):
    """Print the whole of *df*, untruncated, under a banner."""
    banner = '#' * 20 + f'   Data : {df_name}   ' + '#' * 20
    display_opts = (
        'display.max_rows', None,
        'display.max_columns', None,
        'display.width', 500,
    )
    with pd.option_context(*display_opts):
        print(banner)
        print(df)

+ 190 - 0
_opt/boundary/_utils/datetime_func.py

@@ -0,0 +1,190 @@
+import datetime
+from dateutil import tz
+import time
+from functools import wraps
+import traceback
+import sys
+import threading
+
+
+# 获取带上海时区的当前时间,可指定以天为单位的延时,用于本地历史数据测试
def get_now(delay_day=0):
    """Current time in Asia/Shanghai, optionally shifted back *delay_day*
    days (only used when replaying historical data locally).

    :param delay_day: int number of days to subtract
    :return: timezone-aware datetime
    """
    shanghai = tz.gettz('Asia/Shanghai')
    backshift = datetime.timedelta(days=delay_day)
    return datetime.datetime.now(tz=shanghai) - backshift
+
+
+# decorator 使被修饰任务整点运行,并处理异常
def timed_exec(intv_timed: datetime.timedelta, round_mode, sleep=60):
    """Decorator: run the wrapped task once per *intv_timed* on rounded
    wall-clock times, looping forever and logging any exception.

    The wrapped task's first parameter must be ``now``; the decorator
    supplies it, so callers no longer pass it.

    :param intv_timed: timedelta between runs
    :param round_mode: rounding mode, one of
        {None, 'sec', 'min', 'hou', 'day', '10min', '30min'}
    :param sleep: int=60 seconds to wait while the interval has not yet
        elapsed, or after an exception
    :return: function
    """
    def timed_exec_deco(func):
        @wraps(func)
        def timed_execute_wraps(*args, **kwargs):
            now = round_time(get_now(), round_mode) - intv_timed
            while True:
                try:
                    now = round_time(time_block(now, intv_timed, sleep=sleep), round_mode)
                    func(now, *args, **kwargs)
                except Exception:
                    e_type, e_value, e_tb = sys.exc_info()
                    print()
                    print(f"{datetime.datetime.now()}:")
                    print(f"when run {now} task an error occur!")
                    print(f"Exception Info:")
                    print(e_type)
                    print(e_value)
                    traceback.print_tb(e_tb)
                    print()
                    time.sleep(sleep)
        return timed_execute_wraps
    return timed_exec_deco
+
+
+# decorator 使被修饰任务整点运行,并处理异常,且告知任务这次和上次的时间
def timed_exec_last_now(intv_timed: datetime.timedelta, round_mode, bg_last_now='auto', sleep=60):
    """Decorator: run the wrapped task once per *intv_timed* on rounded
    wall-clock times, passing both the current and the previous run time.

    The wrapped task's first two parameters must be ``now`` and
    ``last_now``; the decorator supplies them, so callers no longer do.

    :param intv_timed: timedelta between runs
    :param round_mode: rounding mode, one of
        {None, 'sec', 'min', 'hou', 'day', '10min', '30min'}
    :param bg_last_now: datetime used as last_now for the very first run;
        default='auto' uses now - intv_timed
    :param sleep: int=60 seconds to wait while the interval has not yet
        elapsed, or after an exception
    :return: function
    """
    def timed_exec_deco(func):
        @wraps(func)
        def timed_execute_wraps(*args, **kwargs):
            if bg_last_now == 'auto':
                last_now = round_time(get_now(), round_mode) - intv_timed
            else:
                last_now = bg_last_now
            # Bug fix: 'now' must be bound before the first try, otherwise an
            # exception during the first iteration made the error report
            # itself crash with UnboundLocalError on the f-string below.
            now = last_now
            while True:
                try:
                    now = round_time(time_block(last_now, intv_timed, sleep=sleep), round_mode)
                    func(now, last_now, *args, **kwargs)
                    last_now = now
                except Exception:
                    print()
                    print(f"{datetime.datetime.now()}:")
                    print(f"when run {now} task an error occur!")
                    print(f"Exception Info:")
                    e_type, e_value, e_traceback = sys.exc_info()
                    print(e_type)
                    print(e_value)
                    traceback.print_tb(e_traceback)
                    print()
                    time.sleep(sleep)
        return timed_execute_wraps
    return timed_exec_deco
+
+
+# decorator 使被修饰任务整点运行,并处理异常,且在出错时会打印线程信息
def timed_exec_multhd(intv_timed: datetime.timedelta, round_mode, sleep=60):
    """Decorator: like ``timed_exec`` but, on error, also prints which
    thread the task was running in (for multi-threaded schedulers).

    The wrapped task's first parameter must be ``now``; the decorator
    supplies it, so callers no longer pass it.

    :param intv_timed: timedelta between runs
    :param round_mode: rounding mode, one of
        {None, 'sec', 'min', 'hou', 'day', '10min', '30min'}
    :param sleep: int=60 seconds to wait while the interval has not yet
        elapsed, or after an exception
    :return: function
    """
    def timed_exec_deco(func):
        @wraps(func)
        def timed_execute_wraps(*args, **kwargs):
            now = round_time(get_now(), round_mode) - intv_timed
            while True:
                try:
                    now = round_time(time_block(now, intv_timed, sleep=sleep), round_mode)
                    func(now, *args, **kwargs)
                except Exception:
                    # Bug fix: threading.currentThread() was deprecated in
                    # 3.10 and removed in 3.13; current_thread() is the
                    # supported spelling.
                    thread_curr = threading.current_thread()
                    print()
                    print(f"{datetime.datetime.now()}:")
                    # NOTE(review): _target/_args are private Thread
                    # attributes, kept to preserve the exact log output.
                    print(f"In {thread_curr.name} (target: {thread_curr._target}, args: {thread_curr._args})")
                    print(f"when run {now} task an error occur!")
                    print(f"Exception Info:")
                    e_type, e_value, e_traceback = sys.exc_info()
                    print(e_type)
                    print(e_value)
                    traceback.print_tb(e_traceback)
                    print()
                    time.sleep(sleep)
        return timed_execute_wraps
    return timed_exec_deco
+
+
+# 对 datetime 向前取整
def round_time(t_: datetime.datetime, lvl=None):
    """Round *t_* down according to *lvl*.

    :param t_: datetime
    :param lvl: str=None rounding mode; currently one of
        {None, 'sec', 'min', 'hou', 'day', '10min', '30min'}
    :return: rounded datetime, or the string "No support" for an unknown mode
    """
    if lvl is None:
        return t_
    # Dispatch to the module-level round_<lvl> helper, if one is defined.
    rounder = globals().get("round_" + lvl)
    if rounder is None:
        return "No support"
    return rounder(t_)
+
+
def round_sec(t_: datetime.datetime):
    """Drop the sub-second part."""
    return t_.replace(microsecond=0)


def round_min(t_: datetime.datetime):
    """Round down to the whole minute."""
    return t_.replace(second=0, microsecond=0)


def round_hou(t_: datetime.datetime):
    """Round down to the whole hour."""
    return t_.replace(minute=0, second=0, microsecond=0)


def round_day(t_: datetime.datetime):
    """Round down to midnight."""
    return t_.replace(hour=0, minute=0, second=0, microsecond=0)


def round_10min(t_: datetime.datetime):
    """Round down to the nearest 10-minute boundary."""
    floored = round_min(t_)
    return floored - datetime.timedelta(minutes=floored.minute % 10)


def round_30min(t_: datetime.datetime):
    """Round down to the nearest 30-minute boundary."""
    floored = round_min(t_)
    return floored - datetime.timedelta(minutes=floored.minute % 30)
+
+
+# 半成品的时间比较函数
def time_check(t_, st, timed_thre):
    """Return True when at least *timed_thre* has elapsed from *st* to *t_*."""
    return t_ - st >= timed_thre
+
+
+# 阻塞并不断检查,直到当前时间和指定时间的差大于指定间隔时,释放并返回当前时间
def time_block(t_: datetime.datetime, timed_thre: datetime.timedelta, sleep=1):
    """Block until now - *t_* >= *timed_thre*, then return the current time.

    Polls ``get_now()`` every *sleep* seconds while waiting.
    """
    while True:
        current = get_now()
        if current - t_ < timed_thre:
            time.sleep(sleep)
        else:
            return current

+ 136 - 0
_opt/boundary/_utils/point_reader.py

@@ -0,0 +1,136 @@
+from datetime import datetime
+import time
+
+try:
+    from workflowlib import requests
+except:
+    import requests
+
+import json
+import pandas as pd 
+
+
+
class PointReader:
    """Fetch time-series point data from the data service over HTTP.

    Posts JSON requests for a list of point ids, optionally splitting long
    date ranges into chunks, and merges the per-point responses into one
    DataFrame indexed by naive local (Asia/Shanghai) datetimes.
    """

    def __init__(
        self,
        point_ids: list,
        dt_begin : datetime,
        dt_end   : datetime,
        url      : str
    ) -> None:
        # point_ids: ids requested from the service; url: POST endpoint.
        self.point_ids = point_ids
        self.dt_begin  = dt_begin
        self.dt_end    = dt_end
        self.url       = url
    
    def _read(self,dt_begin=None,dt_end=None) -> pd.DataFrame:
        """Fetch one [dt_begin, dt_end] window (defaults to the instance range).

        :return: DataFrame with one column per point id, indexed by naive
            local datetime; None when no point returned any data.
        :raises Exception: when the range is inverted or the service
            reports an error state.
        """
        dt_begin = self.dt_begin if dt_begin is None else dt_begin
        dt_begin = dt_begin.replace(second=0,microsecond=0)  # truncate to whole minutes
        dt_end   = self.dt_end if dt_end is None else dt_end
        dt_end   = dt_end.replace(second=0,microsecond=0)
        
        if dt_begin > dt_end:
            raise Exception('开始时间晚于起始时间')
        
        ts_begin  = round(dt_begin.timestamp())
        ts_end    = round(dt_end.timestamp())
        
        post_data = {
            "point_ids": self.point_ids,
            "begin"    : ts_begin,
            "end"      : ts_end,
            "interval" : 1,
            "type"     : 3,
        }
        
        try:
            res       = requests.post(url=self.url, data=json.dumps(post_data))
            time.sleep(0.1)  # brief pause so repeated calls don't hammer the service
            res_state = res.json()['state']
        except:
            # NOTE(review): if requests.post itself raised, `res` is unbound
            # here and the prints below raise NameError instead of the
            # intended diagnostic — confirm and tighten this handler.
            print('post_data',post_data)
            print('url',self.url)
            print('res',res)
            print('json',res.json())
            raise Exception(res.json())
        
        if res_state != 0:  # non-zero state means the service reported an error
            print('post_data',post_data)
            raise Exception(res.json())
        
        point_df = []
        for point_info in res.json()['data']:
            point_id   = point_info['point_id']
            point_data = point_info['data']
            
            if (point_data is None) or (len(point_data) == 0):
                # Skip points that returned no data; keep the others.
                print('post_data',post_data)
                print('res.json',res.json())
                print('未获取到对应的点位数据, 检查时间是否正确')
                continue
            
            df = pd.DataFrame(point_data).rename(columns={'value':point_id}).set_index('ts')
            point_df.append(df)

        if len(point_df) == 0:
            print('post_data',post_data)
            print('res.json',res.json())
            print('所有点位的数据均未获取到, 需检查点位或接口')
            return None
        
        # Epoch seconds -> Shanghai local time, then drop the tz info.
        data       = pd.concat(point_df,axis=1)
        data.index = pd.to_datetime(data.index,unit='s',utc=True).tz_convert('Asia/Shanghai')
        data       = data.tz_localize(tz=None)
        
        return data

    def read_interval(self) -> pd.DataFrame:
        """Read the whole configured range in 1-day chunks and concatenate."""
        interval = pd.date_range(start=self.dt_begin, end=self.dt_end, freq='1D').to_pydatetime().tolist()
        interval += [self.dt_end]  # ensure the final partial day is included
        
        data = []
        for idx in range(len(interval)):
            if idx == len(interval)-1:
                continue
            start      = interval[idx]
            end        = interval[idx+1]
            finish_pct = round((idx+1)/(len(interval)-1) * 100,2)
            print(f'获取第{idx}段数据({finish_pct}%),开始时间:{interval[idx]},结束时间:{interval[idx+1]}')
            
            data.append( self._read(dt_begin=start,dt_end=end) )
        data = pd.concat(data,axis=0)
        # Adjacent chunks overlap by one timestamp; keep the later read.
        data = data.loc[~data.index.duplicated(keep='last'),:].sort_index()
        
        return data 
    
    def read_int_interval(self,freq='H') -> pd.DataFrame:
        """Read single samples at whole-hour ('H') or whole-day ('D') marks.

        NOTE(review): any other *freq* leaves start/end unbound and raises
        UnboundLocalError below — confirm only 'H'/'D' are ever passed.
        """
        if freq == 'H':
            start = self.dt_begin.replace(minute=0)
            end   = self.dt_end.replace(minute=0)
        elif freq == 'D':
            start = self.dt_begin.replace(hour=0,minute=0)
            end   = self.dt_end.replace(hour=0,minute=0)
        
        start        = start.replace(second=0,microsecond=0)
        end          = end.replace(second=0,microsecond=0)
        int_interval = pd.date_range(start=start, end=end, freq=freq).to_pydatetime()
        
        if len(int_interval) == 0:
            raise Exception(f'在指定的日期范围下没有获取到对应的时间点(start:{start},end:{end},freq:{freq})')
        
        # data = [self._read(dt_begin=dt,dt_end=dt) for dt in int_interval]
        data = []
        for idx,dt in enumerate(int_interval):
            # Each call fetches a single timestamp (begin == end).
            data.append(self._read(dt_begin=dt,dt_end=dt))
            finish_pct = round((idx+1)/(len(int_interval)) * 100,2)
            print(f'获取第{idx}段数据({finish_pct}%),时间:{dt}')
            
        data = pd.concat(data,axis=0)
        data = data.loc[~data.index.duplicated(keep='last'),:].sort_index()
        
        return data 

+ 46 - 0
_opt/boundary/_utils/wf_cache.py

@@ -0,0 +1,46 @@
+from datetime import datetime
+from workflowlib.utils import cache as workflow_cache
+
class WfCache:
    """Typed read/write wrapper around the workflow string cache.

    The backing store (``workflowlib.utils.cache``) only holds strings, so
    values are serialized on write and parsed back on read. ``None`` is
    encoded as the empty string.
    """

    # Wire format used for datetime round-tripping.
    datetime_fmt = "%Y-%m-%d %H:%M:%S"

    def __init__(self) -> None:
        return

    def convert_data_to_str(self, data) -> str:
        """Serialize *data* into the cache's string representation.

        :raises TypeError: for types the cache cannot represent.
        """
        # NOTE: bool is a subclass of int and therefore serializes as
        # 'True'/'False', which convert_str_to_data cannot parse back.
        if isinstance(data, (str, int, float)):
            str_data = str(data)
        elif isinstance(data, datetime):
            str_data = datetime.strftime(data, self.datetime_fmt)
        elif data is None:
            str_data = ''
        else:
            # Fix: previously fell through and crashed with
            # UnboundLocalError; fail with a diagnosable error instead.
            raise TypeError(f'Unsupported cache value type: {type(data)}')

        return str_data

    def convert_str_to_data(self, str_data, data_type):
        """Parse *str_data* back into a value of *data_type*.

        '' and None both decode to None.
        :param data_type: one of 'int', 'float', 'str', 'datetime'
        :raises ValueError: for an unknown *data_type*.
        """
        if str_data == '':
            return None
        if str_data is None:
            return None

        if data_type == 'int':
            data = int(str_data)
        elif data_type == 'float':
            data = float(str_data)
        elif data_type == 'str':
            data = str_data
        elif data_type == 'datetime':
            data = datetime.strptime(str_data, self.datetime_fmt)
        else:
            # Fix: previously returned an unbound local (UnboundLocalError).
            raise ValueError(f'Unsupported data_type: {data_type}')

        return data

    def read(self, key, data_type):
        """Fetch *key* from the workflow cache and parse it as *data_type*."""
        str_data = workflow_cache.get(key)
        data = self.convert_str_to_data(str_data, data_type)
        return data

    def write(self, key, data):
        """Serialize *data* and store it in the workflow cache under *key*."""
        str_data = self.convert_data_to_str(data)
        workflow_cache.set(key, str_data)

+ 182 - 0
_opt/boundary/main.py

@@ -0,0 +1,182 @@
+from typing import Union
+
+import pandas as pd
+from ._utils.config_info import ConfigInfo
+
+
+# 三种类型的边界
+# 1. 静态边界:
+#    - 内在的属性,应该是范围最宽的
+#    - 必定存在
+# 2. 动态边界:
+#    - 外在限制下的额外边界,边界两端已知
+#    - 非必要
+# 3. 动态目标:外在限制下的额外边界,边界中心已知
+#    - 通常目标值会使用当前值
+#    - 非必要
+#    - 当多个变量统一优化时,这些变量应该具有相同的边界,因此不适合将当前值作为目标值
+
+# 最终的边界由上述三种边界的交集决定,因此某个类型的边界越宽则约束越小
+
+# 对于非必要的边界,需要存在一种动态的方法能够使其失效
+#   动态边界:组件外部实现
+#   动态目标:与静态范围不重叠 / 组件输入的动态目标为False
+
def main(*args,config = None) -> list:
    """Build the boundary description(s) for the component's output variables.

    Intersects the three boundary types described above (static, dynamic,
    target-centred) and returns one boundary dict per output variable
    (a bare dict when there is exactly one).

    :param args: per-port input data, grouped via the component config
    :param config: component configuration dict (must contain the static
        bounds and the _PORTS_* declarations)
    """
    config_info   = ConfigInfo(config)
    syn_opt       = config_info.get_property('syn_opt',default=False)      # optimise all variables jointly?
    var_type      = config_info.get_property('var_type',default=1)         # variable type: 0 continuous, 1 discrete
    var_type      = 0 if var_type is True else 1
    var_precis    = config_info.get_property(key='var_precis',default=1)   # precision (decimal places)
    ub_static     = config['ub_static']                                    # static upper bound
    lb_static     = config['lb_static']                                    # static lower bound
    check_boundary(ub_static, lb_static)
    ub_dyn_drift  = config_info.get_property('dynamic_ub_drift',default=0) # drift added to the dynamic upper bound
    lb_dyn_drift  = config_info.get_property('dynamic_lb_drift',default=0) # drift added to the dynamic lower bound
    adj_val       = config_info.get_property('adj_val')                    # half-width of the target-centred boundary
    cur_as_target = config_info.get_property('cur_as_target',default=False)
    
    group_info    = config_info.get_io_group_info(io='in',type='data',data=args)
    group_id_info = config_info.get_io_group_info(io='in',type='point_id')
    output_id     = config_info.get_io_id('out')
    
    # With joint optimisation every variable shares the first output id as
    # its group id; otherwise each variable is its own group (see loop).
    bound_id       = output_id[0]
    
    cur_val = get_cur_val(
        cur_val_df  = group_info['当前值(必填)'],
        cur_val_col = group_id_info['当前值(必填)'],
        config_info = config_info
    )
    
    # First two entries of the optional group are the dynamic ub/lb inputs.
    ub_dynamic, lb_dynamic = get_dynamic_boundary(
        ub_dyn_drift,
        lb_dyn_drift,
        group_info['动态边界(选填)'][0:2]
    )
    
    boundary = [] # boundary = [{'var1':{...}}, {'var2':{...}}]
    for cur_var_name in output_id:
        
        if syn_opt == False:
            bound_id = cur_var_name
        
        if cur_var_name not in cur_val.columns:
            raise Exception(f'组件输出的参数编号{cur_var_name}未在输入的数据中找到,输入的数据包含以下列{cur_val.columns}')
        
        # Target-centred boundary (third entry of the optional group).
        ub_trg, lb_trg = get_target_boundary(
            adj_val       = adj_val,
            cur_as_target = cur_as_target,
            cur_var_value = cur_val.loc[:,cur_var_name].iloc[0],
            ub_static     = ub_static,
            lb_static     = lb_static,
            syn_opt       = syn_opt,
            input_trg     = group_info['动态边界(选填)'][2]
        )
            
        # Intersect all boundary types (tightest wins).
        ub = min(ub_static,ub_dynamic,ub_trg)
        lb = max(lb_static,lb_dynamic,lb_trg)
        check_boundary(ub,lb)

        boundary_info = {
            'id'           : bound_id,                        # which boundary component this "group" comes from
            'var_name'     : cur_var_name,                    # name within the group (optimisation variable)
            'ub'           : ub,                              # same within the group
            'lb'           : lb,                              # same within the group
            'var_type'     : var_type,                        # same within the group
            'var_precis'   : var_precis,                      # same within the group
            'boundary_type': 'interval',                      # same within the group
            'cur_val'      : cur_val.loc[:,[cur_var_name]]    # differs within the group
        }
        
        boundary.append(boundary_info)
        
    return boundary if len(boundary)>1 else boundary[0]
+
def get_cur_val(
    cur_val_df:list,
    cur_val_col:list,
    config_info
) -> pd.DataFrame:
    """Assemble the current-value inputs into one DataFrame.

    Validates that every input really is a DataFrame and that the merged
    columns match the component's declared outputs.

    :param cur_val_df: list of single-column DataFrames, one per variable
    :param cur_val_col: the point ids used as column names
    :param config_info: provides get_io_id('out') for the consistency check
    """
    # Reject any non-DataFrame input up front with a descriptive error.
    for col_id, frame in zip(cur_val_col, cur_val_df):
        if not isinstance(frame, pd.DataFrame):
            raise Exception(f'组件的输入({col_id})不是DataFrame,是{type(frame)}({frame})')

    merged         = pd.concat(cur_val_df, axis=1)
    merged.columns = cur_val_col

    # The current-value columns must line up with the component outputs.
    if merged.columns.to_list() != config_info.get_io_id(io='out'):
        raise Exception(f'组件输入的当前值与组件输出不匹配,组件的当前值为:{merged.columns.to_list()},组件的输出为:{config_info.get_io_id(io="out")}')

    return merged
+
def get_dynamic_boundary(ub_dynamic_drift,lb_dynamic_drift,dyn_input):
    """Derive the dynamic bounds from the optional upper/lower inputs.

    A missing input (None) falls back to +/-1e100, i.e. effectively
    unconstrained; a present input is shifted by its drift.

    :param dyn_input: [upper-bound DataFrame or None, lower-bound DataFrame or None]
    """
    # Every entry must be either a DataFrame or None.
    for frame in dyn_input:
        if frame is not None and not isinstance(frame, pd.DataFrame):
            raise Exception(f'动态边界的输入必须是DataFrame或None,获取到的输入是{type(frame)}')

    raw_ub = dyn_input[0].iat[0,0] if isinstance(dyn_input[0], pd.DataFrame) else None
    raw_lb = dyn_input[1].iat[0,0] if isinstance(dyn_input[1], pd.DataFrame) else None

    ub_dynamic = 1e100 if raw_ub is None else raw_ub + ub_dynamic_drift
    lb_dynamic = -1e100 if raw_lb is None else raw_lb + lb_dynamic_drift

    check_boundary(ub_dynamic, lb_dynamic)

    return ub_dynamic, lb_dynamic
+
def get_target_boundary(
    adj_val      : float,
    cur_as_target: bool,
    cur_var_value: float,
    ub_static    : float,
    lb_static    : float,
    syn_opt      : bool,
    input_trg    : Union[pd.DataFrame,bool]
    ):
    """Boundary centred on a target value, extending +/- *adj_val*.

    Returns (+1e100, -1e100) — i.e. no extra constraint — whenever the
    target boundary is disabled or does not overlap the static range.
    """
    default_ub_trg = 1e100
    default_lb_trg = -1e100
    
    # The target boundary is disabled when:
    # 1. adj_val was not provided
    # 2. the target boundary does not overlap the static boundary at all
    # 3. the supplied target value is the literal False
    
    if adj_val is None:
        return default_ub_trg, default_lb_trg
    
    if isinstance(input_trg,bool) and input_trg == False:
        return default_ub_trg, default_lb_trg
    
    if cur_as_target == True and syn_opt == True:
        raise Exception('当开启统一优化时,不能将当前变量设为目标变量,需要额外指定目标变量')
    
    # Resolve the target value: either the variable's current value, or a
    # single-row DataFrame supplied on the dynamic-target port.
    if cur_as_target == True:
        target_val    = cur_var_value
    elif cur_as_target == False and isinstance(input_trg,pd.DataFrame) and len(input_trg)==1:
        target_val = input_trg.iat[0,0]
    else:
        raise Exception(f'输入的动态目标值有误:{input_trg}')
    
    ub_trg  = target_val + adj_val
    lb_trg  = target_val - adj_val
    check_boundary(ub_trg,lb_trg)
    
    # No overlap with the static range -> treat as unconstrained.
    if (ub_trg < lb_static) or (lb_trg > ub_static):
        return default_ub_trg, default_lb_trg
    
    return ub_trg,lb_trg
+
def check_boundary(ub,lb):
    """Raise ValueError when the upper bound falls below the lower bound."""
    if lb > ub:
        raise ValueError(f'变量的上限({ub})<下限({lb}),需检查设定的边界值范围')
+    

+ 32 - 0
_opt/boundary/sim_config.py

@@ -0,0 +1,32 @@
def simulate_config(
    opt_var         : list,
    syn_opt         : bool,
    var_type        : bool,
    lb_static       : float,
    ub_static       : float,
    var_precis      : int=1,
    dynamic_lb_drift: float=0,
    dynamic_ub_drift: float=0,
    adj_val         : float = None,
):
    """Assemble a simulated component config dict for offline testing.

    Three fixed optional ports (static ub/lb, dynamic target) are prepended
    to the optimisation variables; input ports are split into two groups by
    index: [0, 3) and [3, end).
    """
    fixed_ports  = ['静态上边界(选填)','静态下边界(选填)','动态目标值(选填)']
    input_var_id = fixed_ports + opt_var
    n_inputs     = len(input_var_id)

    config = {
        'adj_val'         : adj_val,
        'syn_opt'         : syn_opt,
        'var_type'        : var_type,
        'lb_static'       : lb_static,
        'ub_static'       : ub_static,
        'var_precis'      : var_precis,
        'dynamic_lb_drift': dynamic_lb_drift,
        'dynamic_ub_drift': dynamic_ub_drift,
        '_PORTS_IN'       : [{'point_id': port} for port in input_var_id],
        '_PORTS_OUT'      : [{'point_id': port} for port in opt_var],
        '_PORTS_IN_GROUP': [
            {'end': 3, 'name': '动态边界(选填)', 'start': 0},
            {'end': n_inputs, 'name': '当前值(必填)', 'start': 3}
        ],
    }

    return config

+ 39 - 0
components/_base_components.py

@@ -0,0 +1,39 @@
+import numpy as np
+try:
+    import pymc as pm
+except:
+    pass
+
+from .._model._base import BaseModel
+
class BaseComponents(BaseModel):
    """Shared base for the HVAC component models.

    Provides physical constants and an engine-dependent math-function
    lookup so the same model code can run under numpy or pymc.
    """

    # Physical constants shared by every component model.
    CONSTANT = {
        'T_offset' : 273.15,
        'h_ads'    : 2200,   # kJ/kg heat of adsorption (silica gel ~2100-2300 kJ/kg in the literature, close to water's latent heat of condensation)
        'c_p_air'  : 1.05,   # kJ/(kg*K) specific heat of air
        'c_p_v'    : 1.84,   # kJ/(kg*K) specific heat of water vapour
        'c_p_water': 4.18,   # kJ/(kg*K) specific heat of water (~4.18)
        'rho_air'  : 1.184,  # kg/m3, air density at 25 degC
    }

    def __init__(self) -> None:
        super().__init__()

    @staticmethod
    def get_func_by_engine(engine: str) -> dict:
        """Return the math primitives (EXP/WHERE/GT/LT) for *engine*.

        Fix: declared as @staticmethod. The original was a plain function
        that only worked because every caller invoked it through the class
        (``cls.get_func_by_engine(engine)``); calling it on an instance
        would have passed ``self`` as *engine* and silently selected numpy.

        :param engine: 'pymc' selects pymc.math ops (requires the optional
            pymc import at module top to have succeeded); anything else
            selects the numpy equivalents.
        """
        if engine == 'pymc':
            EXP   = pm.math.exp
            WHERE = pm.math.switch
            GT    = pm.math.gt
            LT    = pm.math.lt
        else:
            EXP   = np.exp
            WHERE = np.where
            GT    = np.greater
            LT    = np.less
        return {
            'EXP'   : EXP,
            'WHERE' : WHERE,
            'GT'    : GT,
            'LT'    : LT,
        }

+ 225 - 0
components/coil.py

@@ -0,0 +1,225 @@
+import numpy as np
+try:
+    import pymc as pm
+except:
+    pass
+
+from ._base_components import BaseComponents
+from ..tools.enthalpy import get_Dew_from_HumRatio,get_Enthalpy_from_Tdb_and_HumRatio
+from ..tools.enthalpy import get_HumRatio_from_Dew
+
+
class CoolingCoil(BaseComponents):
    """Effectiveness-NTU-style cooling-coil model with dry/wet modes."""

    def __init__(self, engine):
        # NOTE(review): BaseComponents.__init__ takes no arguments, so this
        # call would raise TypeError if the class were instantiated — confirm.
        super().__init__(engine)
    
    @classmethod
    def model(
        cls,
        TinA,HinA,FA,
        TinW,FW,
        engine,
        param
    ):
        """Predict outlet air dry-bulb temperature, humidity ratio and dew point.

        :param TinA: inlet air dry-bulb temperature
        :param HinA: inlet air humidity ratio
        :param FA:   air flow
        :param TinW: inlet water temperature
        :param FW:   water flow
        :param engine: backend selector passed to get_func_by_engine
        :param param:  fitted parameters (UA_dry, UA_wet_ratio, Ts_adj, eta)
        """
        FUNC  = cls.get_func_by_engine(engine)
        WHERE = FUNC['WHERE']
        GT    = FUNC['GT']
        EXP   = FUNC['EXP']
        
        DinA = get_Dew_from_HumRatio(HinA,engine)
        mode = WHERE(GT(DinA-TinW,0),1,0) # 1 = wet mode (inlet dew point above water temperature)
        
        UA_dry       = param['UA_dry']
        UA_wet_ratio = param['UA_wet_ratio']
        Ts_adj       = param['Ts_adj']
        eta          = param['eta']
        UA_wet       = UA_dry * UA_wet_ratio
        
        C_min   = FA * cls.CONSTANT['c_p_air']
        C_max   = FW * cls.CONSTANT['c_p_water']
        Cr      = C_min / C_max
        NTU_wet = UA_wet / C_min
        NTU_dry = UA_dry / C_min
        NTU     = WHERE(mode,NTU_wet,NTU_dry)
        # Effectiveness formula; assumes the air side carries C_min — TODO confirm
        epsilon = (1-EXP(-NTU*(1-Cr))) / (1-Cr*EXP(-NTU*(1-Cr)))
        
        # Outlet dry-bulb temperature
        Ts    = TinW + Ts_adj * (HinA) * (TinA - TinW) # coil surface temperature under wet conditions
        Q_wet = epsilon * C_min * (TinA - Ts)
        Q_dry = epsilon * C_min * (TinA - TinW)
        Q     = WHERE(mode,Q_wet,Q_dry)
        ToutA = TinA - Q / (FA * cls.CONSTANT['c_p_air'])
        
        # Outlet humidity ratio + dew point (dehumidification only in wet mode)
        HoutA = WHERE(
            mode,
            HinA - eta * (HinA - get_HumRatio_from_Dew(TinW,engine)),
            HinA
        )
        DoutA = get_Dew_from_HumRatio(HoutA,engine)
        
        return {'ToutA':ToutA,'HoutA':HoutA,'DoutA':DoutA} 

    @classmethod
    def prior(cls,name) -> dict:
        """Prior distributions for the coil parameters (requires pymc)."""
        param = {
            'UA_dry'      : pm.HalfNormal(f'{name}_UA_dry',5,initval=10),
            'UA_wet_ratio': pm.TruncatedNormal(f'{name}_UA_wet_ratio',mu=4,sigma=0.2,lower=1,upper=10,initval=4),
            'Ts_adj'      : pm.HalfNormal(f'{name}_Ts_adj',5,initval=5),
            'eta'         : pm.HalfNormal(f'{name}_eta',0.5,initval=0.5),
        }
        return param
+
+
class CoolingCoil2(BaseComponents):
    """Empirical cooling-coil model with exponential flow-efficiency terms."""

    def __init__(self, engine):
        # NOTE(review): BaseComponents.__init__ takes no arguments, so this
        # call would raise TypeError if the class were instantiated — confirm.
        super().__init__(engine)
    
    @classmethod
    def model(
        cls,
        TinA,HinA,FA,
        TinW,FW,
        engine,
        param
    ):
        """Predict outlet air temperature, humidity ratio, dew point and flow.

        :param param: fitted parameters beta1_T..beta3_T (temperature path),
            beta1_D_adj..beta3_D_adj (dew-point multipliers) and beta4_D.
        """
        FUNC  = cls.get_func_by_engine(engine)
        WHERE = FUNC['WHERE']
        EXP   = FUNC['EXP']
        
        DinA = get_Dew_from_HumRatio(HinA,engine)
        
        beta1_T     = param['beta1_T']
        beta2_T     = param['beta2_T']
        beta3_T     = param['beta3_T']
        beta1_D_adj = param['beta1_D_adj']
        beta2_D_adj = param['beta2_D_adj']
        beta3_D_adj = param['beta3_D_adj']
        beta4_D     = param['beta4_D']
        
        # Dew-point coefficients are scaled versions of the temperature ones.
        beta1_D = beta1_T * beta1_D_adj
        beta2_D = beta2_T * beta2_D_adj
        beta3_D = beta3_T * beta3_D_adj
        
        # Saturating efficiency terms in water flow and air flow.
        EFF_T_FW = 1 - EXP(-beta2_T * FW)
        EFF_T_FA = EXP(-beta3_T * FA)
        EFF_D_FW = 1 - EXP(-beta2_D * FW)
        EFF_D_FA = EXP(-beta3_D * FA)
        # NOTE(review): uses the native `>` operator rather than the GT
        # helper used elsewhere — confirm it behaves the same under pymc.
        mode     = WHERE(DinA > TinW,1,0)
        ToutA    = TinA - beta1_T * (TinA - TinW) * EFF_T_FA * EFF_T_FW
        DoutA    = DinA - (beta1_D - beta4_D * (TinA - DinA)) * (DinA - TinW) * mode * EFF_D_FW * EFF_D_FA
        HoutA    = get_HumRatio_from_Dew(DoutA,engine)
        
        return {'ToutA':ToutA,'HoutA':HoutA,'DoutA':DoutA,'FA':FA} 
    
    @classmethod
    def prior(cls,name) -> dict:
        """Prior distributions for the coil parameters (requires pymc)."""
        param = {
            'beta1_T'    : pm.TruncatedNormal(f'{name}_beta1_T',mu=1,sigma=0.2,lower=0),
            'beta2_T'    : pm.TruncatedNormal(f'{name}_beta2_T',mu=5,sigma=3,lower=0),
            'beta3_T'    : pm.TruncatedNormal(f'{name}_beta3_T',mu=5,sigma=0.3,lower=0),
            'beta1_D_adj': pm.TruncatedNormal(f'{name}_beta1_D_adj',mu=1,sigma=0.1,lower=0.1),
            'beta2_D_adj': pm.TruncatedNormal(f'{name}_beta2_D_adj',mu=1,sigma=0.1,lower=0.1),
            'beta3_D_adj': pm.TruncatedNormal(f'{name}_beta3_D_adj',mu=1,sigma=0.1,lower=0.1),
            'beta4_D'    : pm.TruncatedNormal(f'{name}_beta4_D',mu=0.01,sigma=0.01,lower=0)
        }
        return param
+
+
class SteamCoilFs(BaseComponents):
    """Linear regression model for steam-coil steam flow."""

    def __init__(self):
        super().__init__()

    @classmethod
    def model(
        cls,
        TinA,ToutA,FA,
        param,
        engine
    ):
        """Predict steam flow Fs from the air-side temperature lift and flow."""
        heat_term  = param['b1'] * (ToutA - TinA) * FA
        inlet_term = param['b2'] * TinA
        return {'Fs': heat_term + inlet_term + param['b3']}

    @classmethod
    def prior(cls,name):
        """Weakly-informative priors for the regression coefficients (requires pymc)."""
        return {
            'b1': pm.HalfNormal(f'{name}_b1',sigma=10),
            'b2': pm.Normal(f'{name}_b2',sigma=10),
            'b3': pm.Normal(f'{name}_b3',sigma=10),
        }
+
+
class SteamCoilFs2(BaseComponents):
    """Steam-coil model: exponential power proxy FP, then a piecewise
    quadratic inversion (via get_root) to recover steam flow Fs."""

    def __init__(self):
        super().__init__()
    
    @classmethod
    def model(cls,TinA,ToutA,FA,param,engine):
        """Predict FP (power proxy) and steam flow Fs.

        NOTE(review): np.exp is used directly instead of the engine's EXP
        helper, so the FP term is numpy-only even under the pymc engine —
        confirm whether that is intentional.
        """
        b1 = param['b1']
        b2 = param['b2']
        FP = np.exp(b1 * (ToutA - TinA)*FA + b2)
        
        FUNC  = cls.get_func_by_engine(engine)
        WHERE = FUNC['WHERE']
        LT    = FUNC['LT']
        
        # Piecewise quadratic in Fs with a change point: below it use the
        # (a1, a2) branch, above it a branch with a shifted intercept and a3.
        # NOTE(review): the locals are named param_b1..b3 but map to the
        # 'a1'..'a3' parameters — confusing but intentional per the prior().
        param_change_point    = param['change_point']
        param_b1              = param['a1']
        param_b2              = param['a2']
        param_b3              = param['a3']
        param_Ps_change_point = param_b1 + param_b2 * param_change_point
        Fs = WHERE(
            LT(FP,param_Ps_change_point * param_change_point),
            get_root(param_b1,param_b2,FP),
            get_root(param_b1+(param_b2-param_b3)*param_change_point,param_b3,FP)
        )
        return {'FP':FP,'Fs':Fs}

    @classmethod
    def prior(cls,name):
        """Informative priors around previously-fitted values (requires pymc)."""
        param = {
            'b1'          : pm.Normal(f'{name}_b1',mu=0.08,sigma=0.1,initval=0.08),
            'b2'          : pm.Normal(f'{name}_b2',mu=3.65,sigma=0.02,initval=3.65),
            'change_point': pm.Normal(f'{name}_change_point',mu=104,sigma=1,initval=104),
            'a1'          : pm.Normal(f'{name}_a1',mu=17.5,sigma=1,initval=17.5),
            'a2'          : pm.Normal(f'{name}_a2',mu=0.2,sigma=0.1,initval=0.2),
            'a3'          : pm.Normal(f'{name}_a3',mu=1.5,sigma=0.1,initval=1.5),
        }
        return param
+
class SteamCoilFs3(BaseComponents):
    """Linear steam-flow model driven by the temperature lift and outlet temperature.

    NOTE(review): FA, HinA and HoutA are accepted but unused in the current
    formula — confirm whether they were meant to enter the model.
    """

    def __init__(self):
        super().__init__()
    
    @classmethod
    def model(
        cls,
        TinA,ToutA,FA,
        HinA,HoutA,
        param,
        engine
    ):
        """Predict steam flow Fs = b1*(ToutA - TinA) + b2*ToutA + b3."""
        b1   = param['b1']
        b2   = param['b2']
        b3   = param['b3']
        Fs   = b1 * (ToutA - TinA) + b2 * ToutA + b3
        return {'Fs':Fs}
    
    @classmethod
    def prior(cls,name):
        """Priors for the regression coefficients (requires pymc)."""
        param = {
            'b1': pm.HalfNormal(f'{name}_b1',sigma=10,initval=1),
            'b2': pm.HalfNormal(f'{name}_b2',sigma=10,initval=1),
            'b3': pm.Normal(f'{name}_b3',sigma=10,initval=0),
        }
        return param
+
+
def get_root(b0,b1,FP):
    """Positive root x of b1*x**2 + b0*x - FP = 0 (quadratic formula)."""
    discriminant = b0 ** 2 + 4.0 * b1 * FP
    return (np.sqrt(discriminant) - b0) / (2.0 * b1)

+ 24 - 0
components/mixed.py

@@ -0,0 +1,24 @@
+from ._base_components import BaseComponents
+from ..tools.enthalpy import get_Dew_from_HumRatio
+
class Mixed(BaseComponents):
    """Mixing of two air streams via flow-weighted averaging."""

    def __init__(self):
        super().__init__()
    
    @classmethod
    def model(
        cls,
        TinA,HinA,FA,
        TinM,HinM,FM,
        engine
    ):
        """Mix stream A with stream M; return outlet T, humidity ratio, dew point and both flows.

        NOTE(review): FA + FM == 0 would divide by zero — confirm callers
        guarantee positive total flow (the clamping below is commented out).
        """
        # FUNC  = cls.get_func_by_engine(engine)
        # WHERE = FUNC['WHERE']
        # FA    = WHERE(FA > 0,FA,0)
        # FM    = WHERE(FM > 0,FM,0)
        ToutA = (TinA * FA + TinM * FM) / (FA + FM)
        HoutA = (HinA * FA + HinM * FM) / (FA + FM)
        DoutA = get_Dew_from_HumRatio(HoutA,engine)
        return {'ToutA':ToutA,'HoutA':HoutA,'DoutA':DoutA,'FA':FA,'FM':FM}
    
+    

+ 234 - 0
components/wheel.py

@@ -0,0 +1,234 @@
+import numpy as np
+try:
+    import pymc as pm
+except:
+    pass
+
+from ._base_components import BaseComponents
+from ..tools.enthalpy import get_RH_from_Tdb_and_Hr
+from ..tools.enthalpy import get_Dew_from_HumRatio
+
class WheelS2(BaseComponents):
    """Desiccant wheel model with process (P) and regeneration (R) sides."""

    def __init__(self):
        super().__init__()
    
    @classmethod
    def model(
        cls,
        TinP,HinP,FP,
        TinR,HinR,FR,
        engine  : str,
        param   : dict,
    ):
        """Predict outlet T / humidity ratio / dew point for both sides.

        :param TinP/HinP/FP: process-side inlet temperature, humidity ratio, flow
        :param TinR/HinR/FR: regeneration-side inlet temperature, humidity ratio, flow
        :param engine: backend selector passed to get_func_by_engine
        :param param:  fitted parameters beta_Q1, beta_H1..beta_H4
        """
        FUNC = cls.get_func_by_engine(engine)
        EXP  = FUNC['EXP']
        RinP = get_RH_from_Tdb_and_Hr(TinP,HinP,engine)
        
        beta_Q1 = param['beta_Q1']
        beta_H1 = param['beta_H1']
        beta_H2 = param['beta_H2']
        beta_H3 = param['beta_H3']
        beta_H4 = param['beta_H4']
        
        # Wheel temperature (weighted blend of the two inlet temperatures)
        T_avg = TinR * (1 - beta_H4) + TinP * beta_H4
        
        # Outlet dew point (moisture removal)
        ## process side
        Hdiff_mu = beta_H1 * RinP ** beta_H2 * EXP(-beta_H3 / T_avg) / 1000 # kg vapour / kg dry air
        HoutP_mu = HinP - Hdiff_mu
        DoutP_mu = get_Dew_from_HumRatio(HoutP_mu,engine)
        ## regeneration side (picks up the moisture, scaled by the flow ratio)
        HoutR_mu = HinR + Hdiff_mu * (FP / FR)
        DoutR_mu = get_Dew_from_HumRatio(HoutR_mu,engine)
        
        # Outlet temperature
        # process side
        Q_latent   = Hdiff_mu * cls.CONSTANT['h_ads'] # kJ/kg latent heat
        Q_sensible = beta_Q1 * (TinR - TinP)          # kJ/kg sensible heat (the enthalpy-raising share)
        Q          = (Q_latent + Q_sensible) * FP     # kJ
        ToutP_mu   = TinP + Q / (FP * cls.CONSTANT['c_p_air'])
        # regeneration side
        ToutR_mu   = TinR - Q / (FR * cls.CONSTANT['c_p_air'])
        
        return {
            'ToutP':ToutP_mu,'HoutP':HoutP_mu,'DoutP':DoutP_mu,
            'ToutR':ToutR_mu,'HoutR':HoutR_mu,'DoutR':DoutR_mu,
        }
    
    @classmethod
    def prior(cls,name):
        """Prior distributions for the wheel parameters (requires pymc)."""
        param = {
            'beta_Q1': pm.HalfNormal(f'{name}_beta_Q1',sigma=10),
            'beta_H1': pm.HalfNormal(f'{name}_beta_H1',sigma=10),
            'beta_H2': pm.HalfNormal(f'{name}_beta_H2',sigma=10),
            'beta_H3': pm.HalfNormal(f'{name}_beta_H3',sigma=10),
            'beta_H4': pm.Uniform(f'{name}_beta_H4',lower=0,upper=1),
        }
        return param
+
+
class WheelS3(BaseComponents):
    """Desiccant wheel model with process (P), regeneration (R) and cooling (C) sides."""

    def __init__(self):
        super().__init__()
    
    @classmethod
    def model(
        cls,
        TinP,HinP,FP,
        TinR,HinR,FR,
        TinC,HinC,FC,
        engine  : str,
        param   : dict,
    ):
        """Predict outlet T / humidity ratio / dew point / flow for all three sides.

        Moisture and heat removed on the process and cooling sides are
        carried away by the regeneration stream (mass/energy balance).
        """
        FUNC = cls.get_func_by_engine(engine)
        EXP  = FUNC['EXP']
        
        beta_P1 = param['beta_P1']
        beta_P2 = param['beta_P2']
        beta_P3 = param['beta_P3']
        beta_P4 = param['beta_P4']
        beta_P5 = param['beta_P5']
        beta_C1 = param['beta_C1']
        beta_C2 = param['beta_C2']
        beta_C3 = param['beta_C3']
        beta_C4 = param['beta_C4']
        
        RinP = get_RH_from_Tdb_and_Hr(TinP,HinP,engine)
        RinC = get_RH_from_Tdb_and_Hr(TinC,HinC,engine)
        
        # Process side: moisture removal plus latent + sensible heating
        HdiffP  = (beta_P1 * RinP**beta_P4 * HinP * TinR * EXP(-beta_P5 * FP) + beta_P2)/1000
        WdiffP  = HdiffP * FP
        HoutP   = HinP - HdiffP
        DoutP   = get_Dew_from_HumRatio(HoutP,engine)
        Q_lat_P = WdiffP * cls.CONSTANT['h_ads']
        Q_sen_P = beta_P3 * (TinR - TinP) * FP #TODO
        TdiffP  = (Q_lat_P + Q_sen_P) / (FP * cls.CONSTANT['c_p_air'])
        ToutP   = TinP + TdiffP
        
        # Cooling side: heating (double-exponential in the R-C temperature gap)
        # with the same dehumidification form as the process side
        TdiffC    = beta_C1 * EXP(-beta_C2 * EXP(-beta_C3 * (TinR - TinC))) * EXP(-beta_C4 * FC)
        ToutC     = TinC + TdiffC
        HdiffC    = (beta_P1 * RinC**beta_P4 * HinC * TinR * EXP(-beta_P5 * FC) + beta_P2)/1000
        WdiffC    = HdiffC * FC
        HoutC     = HinC - HdiffC
        DoutC     = get_Dew_from_HumRatio(HoutC,engine)
        Q_total_C = TdiffC * FC * cls.CONSTANT['c_p_air']
        
        # Regeneration side: absorbs the moisture and heat from P and C
        WdiffR    = WdiffP + WdiffC
        HoutR     = (HinR * FR + WdiffR) / FR
        DoutR     = get_Dew_from_HumRatio(HoutR,engine)
        Q_total_R = Q_lat_P + Q_sen_P + Q_total_C
        TdiffR    = Q_total_R / (FR * cls.CONSTANT['c_p_air'])
        ToutR     = TinR - TdiffR
        
        return {
            'ToutP':ToutP,'HoutP':HoutP,'DoutP':DoutP,'FP':FP,
            'ToutR':ToutR,'HoutR':HoutR,'DoutR':DoutR,'FR':FR,
            'ToutC':ToutC,'HoutC':HoutC,'DoutC':DoutC,'FC':FC,
        } 
        
    @classmethod
    def prior(cls,name):
        """Informative priors for the wheel parameters (requires pymc)."""
        param = {
            'beta_P1': pm.TruncatedNormal(f'{name}_beta_P1',mu=5,sigma=10,initval=5,lower=0),
            'beta_P2': pm.TruncatedNormal(f'{name}_beta_P2',mu=0.5,sigma=1,initval=0.02,lower=0),
            'beta_P3': pm.TruncatedNormal(f'{name}_beta_P3',mu=1,sigma=2,initval=1.5,lower=0),
            'beta_P4': pm.TruncatedNormal(f'{name}_beta_P4',mu=1,sigma=0.3,initval=1,lower=0),
            'beta_P5': pm.TruncatedNormal(f'{name}_beta_P5',mu=5,sigma=2,initval=5,lower=0),
            'beta_C1': pm.TruncatedNormal(f'{name}_beta_C1',mu=60,sigma=10,initval=60,lower=10),
            'beta_C2': pm.TruncatedNormal(f'{name}_beta_C2',mu=30,sigma=10,initval=30,lower=1),
            'beta_C3': pm.TruncatedNormal(f'{name}_beta_C3',mu=0.05,sigma=0.1,initval=0.05,lower=0),
            'beta_C4': pm.TruncatedNormal(f'{name}_beta_C4',mu=1,sigma=1,initval=1,lower=0),
        }
        return param
+
+
+
+# class WheelS3(BaseComponents):
+#     # 需要校准的输出 ToutP DoutP ToutR/ToutC
+    
+#     def __init__(self):
+#         super().__init__()
+    
+#     @classmethod
+#     def model(
+#         cls,
+#         TinP,HinP,FP,
+#         TinR,HinR,FR,
+#         TinC,HinC,FC,
+#         engine  : str,
+#         param   : dict,
+#     ):
+#         FUNC = cls.get_func_by_engine(engine)
+#         EXP  = FUNC['EXP']
+#         RinP = get_RH_from_Tdb_and_Hr(TinP,HinP,engine)
+#         RinC = get_RH_from_Tdb_and_Hr(TinC,HinC,engine)
+        
+#         beta_Q1 = param['beta_Q1']
+#         beta_Q2 = param['beta_Q2']
+#         # beta_Q3 = param['beta_Q3']
+#         beta_H1 = param['beta_H1']
+#         beta_H2 = param['beta_H2']
+#         beta_H3 = param['beta_H3']
+#         beta_H4 = param['beta_H4']
+#         beta_H5 = param['beta_H5']
+        
+#         # 转轮的温度
+#         T_avg = TinR * beta_H4 + TinC * (1 - beta_H4)
+        
+#         # 除湿量
+#         ## 处理侧
+#         H_diff_P = beta_H1 * RinP ** beta_H2 * EXP(-beta_H3 / T_avg)   # 描述吸附  kg水蒸气/kg干空气
+#         W_diff_P = H_diff_P * FP                                       # kg水蒸气
+#         HoutP    = HinP - H_diff_P
+#         # 冷却测
+#         # !转轮的处理侧和冷却侧都在除湿,由于冷却侧出来的空气的温度非常高,所以湿度很难测准
+#         # !因此冷却侧的除湿量需要假定,这里假定和处理侧参数一致
+#         H_diff_C = beta_H1 * RinC ** beta_H2 * EXP(-beta_H3 / T_avg) * beta_H5
+#         W_diff_C = H_diff_C * FC
+#         HoutC    = HinC - H_diff_C
+#         ## 再生侧
+#         HoutR = (HinR * FR + W_diff_P + W_diff_C) / FR # kg水蒸气/kg干空气
+        
+#         # 露点
+#         DoutP = get_Dew_from_HumRatio(HoutP,engine)
+#         DoutC = get_Dew_from_HumRatio(HoutC,engine)
+#         DoutR = get_Dew_from_HumRatio(HoutR,engine)
+        
+#         # 热量
+#         Q_lat_C     = W_diff_C * cls.CONSTANT['h_ads']                   # Kj 冷却侧潜热
+#         Q_lat_P     = W_diff_P * cls.CONSTANT['h_ads']                   # Kj 处理侧潜热
+#         Q_lat_total = Q_lat_C + Q_lat_P                                  # Kj 总潜热
+#         Q_sen_C     = beta_Q1 * (TinR - TinC)                            # Kj 冷却侧显热
+#         # Q_sen_P     = beta_Q1 * (TinR - beta_Q2 * (TinR - TinC) - TinP)  # Kj 处理侧显热
+#         Q_sen_P     = beta_Q1 * (TinR - TinP) * beta_Q2
+#         Q_sen_Total = Q_sen_C + Q_sen_P                                  # Kj 总显热
+        
+#         # 温度变化
+#         ToutP = TinP + (Q_lat_P + Q_sen_P) / (FP * cls.CONSTANT['c_p_air'])
+#         ToutC = TinC + (Q_lat_C + Q_sen_C) / (FC * cls.CONSTANT['c_p_air']) 
+#         ToutR = TinR - (Q_lat_total + Q_sen_Total) / (FR * cls.CONSTANT['c_p_air'])
+        
+#         return {
+#             'ToutP':ToutP,'HoutP':HoutP,'DoutP':DoutP,
+#             'ToutR':ToutR,'HoutR':HoutR,'DoutR':DoutR,
+#             'ToutC':ToutC,'HoutC':HoutC,'DoutC':DoutC,
+#         } 
+    
+#     @classmethod
+#     def prior(cls,name):
+#         param = {
+#             'beta_Q1': pm.HalfNormal(f'{name}_beta_Q1',sigma=1,initval=1),
+#             'beta_Q2': pm.HalfNormal(f'{name}_beta_Q2',sigma=0.1,initval=0.01),
+#             'beta_H1': pm.HalfNormal(f'{name}_beta_H1',sigma=1,initval=0.01),
+#             'beta_H2': pm.HalfNormal(f'{name}_beta_H2',sigma=1,initval=0.01),
+#             'beta_H3': pm.HalfNormal(f'{name}_beta_H3',sigma=20,initval=100),
+#             'beta_H4': pm.Uniform(f'{name}_beta_H4',lower=0,upper=1,initval=0.7),
+#             # 'beta_H5': pm.TruncatedNormal(f'{name}_beta_H5',mu=5,sigma=2,lower=1,initval=5),
+#             'beta_H5':pm.HalfNormal(f'{name}_beta_H5',sigma=1,initval=0.01)
+#         }
+#         return param

+ 110 - 0
doc/整体框架.drawio

@@ -0,0 +1,110 @@
+<mxfile host="65bd71144e">
+    <diagram id="A_9PUjzuGg2k4Z0ytOuo" name="Page-1">
+        <mxGraphModel dx="1380" dy="851" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
+            <root>
+                <mxCell id="0"/>
+                <mxCell id="1" parent="0"/>
+                <mxCell id="9" style="edgeStyle=none;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.75;entryDx=0;entryDy=0;" edge="1" parent="1" source="6" target="7">
+                    <mxGeometry relative="1" as="geometry"/>
+                </mxCell>
+                <mxCell id="30" style="edgeStyle=orthogonalEdgeStyle;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="6" target="7">
+                    <mxGeometry relative="1" as="geometry"/>
+                </mxCell>
+                <mxCell id="6" value="前表冷" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;" vertex="1" parent="1">
+                    <mxGeometry x="100" y="290" width="60" height="120" as="geometry"/>
+                </mxCell>
+                <mxCell id="25" style="edgeStyle=none;html=1;exitX=1;exitY=0.75;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="7" target="23">
+                    <mxGeometry relative="1" as="geometry"/>
+                </mxCell>
+                <mxCell id="31" style="edgeStyle=none;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="7" target="11">
+                    <mxGeometry relative="1" as="geometry"/>
+                </mxCell>
+                <mxCell id="7" value="前转轮" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#e1d5e7;strokeColor=#9673a6;" vertex="1" parent="1">
+                    <mxGeometry x="260" y="50" width="60" height="400" as="geometry"/>
+                </mxCell>
+                <mxCell id="15" style="edgeStyle=none;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.75;entryDx=0;entryDy=0;" edge="1" parent="1" source="10" target="11">
+                    <mxGeometry relative="1" as="geometry"/>
+                </mxCell>
+                <mxCell id="10" value="中表冷" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;" vertex="1" parent="1">
+                    <mxGeometry x="500" y="290" width="60" height="120" as="geometry"/>
+                </mxCell>
+                <mxCell id="16" style="edgeStyle=none;html=1;exitX=1;exitY=0.75;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="11" target="12">
+                    <mxGeometry relative="1" as="geometry"/>
+                </mxCell>
+                <mxCell id="36" style="edgeStyle=none;html=1;exitX=0;exitY=0.25;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="11" target="32">
+                    <mxGeometry relative="1" as="geometry"/>
+                </mxCell>
+                <mxCell id="38" style="edgeStyle=orthogonalEdgeStyle;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="11" target="37">
+                    <mxGeometry relative="1" as="geometry">
+                        <Array as="points">
+                            <mxPoint x="930" y="250"/>
+                            <mxPoint x="930" y="150"/>
+                        </Array>
+                    </mxGeometry>
+                </mxCell>
+                <mxCell id="11" value="后转轮" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#e1d5e7;strokeColor=#9673a6;" vertex="1" parent="1">
+                    <mxGeometry x="660" y="50" width="60" height="400" as="geometry"/>
+                </mxCell>
+                <mxCell id="21" style="edgeStyle=orthogonalEdgeStyle;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="12" target="13">
+                    <mxGeometry relative="1" as="geometry">
+                        <Array as="points">
+                            <mxPoint x="1010" y="350"/>
+                            <mxPoint x="1010" y="545"/>
+                        </Array>
+                    </mxGeometry>
+                </mxCell>
+                <mxCell id="12" value="后表冷" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#dae8fc;strokeColor=#6c8ebf;" vertex="1" parent="1">
+                    <mxGeometry x="820" y="290" width="60" height="120" as="geometry"/>
+                </mxCell>
+                <mxCell id="27" style="edgeStyle=orthogonalEdgeStyle;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="13" target="23">
+                    <mxGeometry relative="1" as="geometry"/>
+                </mxCell>
+                <mxCell id="13" value="车间" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#fff2cc;strokeColor=#d6b656;" vertex="1" parent="1">
+                    <mxGeometry x="460" y="480" width="510" height="130" as="geometry"/>
+                </mxCell>
+                <mxCell id="26" style="edgeStyle=none;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="23" target="10">
+                    <mxGeometry relative="1" as="geometry"/>
+                </mxCell>
+                <mxCell id="23" value="混风" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#1ba1e2;fontColor=#ffffff;strokeColor=#006EAF;" vertex="1" parent="1">
+                    <mxGeometry x="370" y="325" width="50" height="50" as="geometry"/>
+                </mxCell>
+                <mxCell id="35" style="edgeStyle=none;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;entryX=1;entryY=0.25;entryDx=0;entryDy=0;" edge="1" parent="1" source="28" target="7">
+                    <mxGeometry relative="1" as="geometry"/>
+                </mxCell>
+                <mxCell id="28" value="前再生加热盘管" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#f8cecc;strokeColor=#b85450;" vertex="1" parent="1">
+                    <mxGeometry x="400" y="90" width="60" height="120" as="geometry"/>
+                </mxCell>
+                <mxCell id="34" style="edgeStyle=none;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="32" target="28">
+                    <mxGeometry relative="1" as="geometry"/>
+                </mxCell>
+                <mxCell id="32" value="混风" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#1ba1e2;fontColor=#ffffff;strokeColor=#006EAF;" vertex="1" parent="1">
+                    <mxGeometry x="540" y="125" width="50" height="50" as="geometry"/>
+                </mxCell>
+                <mxCell id="39" style="edgeStyle=none;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;entryX=1;entryY=0.25;entryDx=0;entryDy=0;" edge="1" parent="1" source="37" target="11">
+                    <mxGeometry relative="1" as="geometry"/>
+                </mxCell>
+                <mxCell id="37" value="后再生加热盘管" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#f8cecc;strokeColor=#b85450;" vertex="1" parent="1">
+                    <mxGeometry x="820" y="90" width="60" height="120" as="geometry"/>
+                </mxCell>
+                <mxCell id="40" value="" style="endArrow=classic;html=1;entryX=0.5;entryY=0;entryDx=0;entryDy=0;" edge="1" parent="1" target="32">
+                    <mxGeometry width="50" height="50" relative="1" as="geometry">
+                        <mxPoint x="565" y="20" as="sourcePoint"/>
+                        <mxPoint x="540" y="70" as="targetPoint"/>
+                    </mxGeometry>
+                </mxCell>
+                <mxCell id="41" value="" style="endArrow=classic;html=1;exitX=0;exitY=0.25;exitDx=0;exitDy=0;" edge="1" parent="1" source="7">
+                    <mxGeometry width="50" height="50" relative="1" as="geometry">
+                        <mxPoint x="60" y="180" as="sourcePoint"/>
+                        <mxPoint x="90" y="150" as="targetPoint"/>
+                    </mxGeometry>
+                </mxCell>
+                <mxCell id="42" value="" style="endArrow=classic;html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" target="6">
+                    <mxGeometry width="50" height="50" relative="1" as="geometry">
+                        <mxPoint x="10" y="350" as="sourcePoint"/>
+                        <mxPoint x="100" y="160" as="targetPoint"/>
+                    </mxGeometry>
+                </mxCell>
+            </root>
+        </mxGraphModel>
+    </diagram>
+</mxfile>

+ 162 - 0
main.py

@@ -0,0 +1,162 @@
+import time
+from pprint import pprint
+
+import numpy as np
+import pandas as pd
+
+from .model.DHU_2 import DHU_2
+from ._opt.algorithm.sim_config import simulate_config as sim_opt_config
+from ._opt.algorithm.main import main as main_opt
+from ._opt.boundary.sim_config import simulate_config as sim_config_bound
+from ._opt.boundary.main import main as main_bound
+from ._opt.algorithm.model.model import SystemModel
+
+def main(*args,config=None):
+    """Optimization entry point for the dehumidification (DHU) system.
+
+    Parameters
+    ----------
+    *args : 16 positional single-column DataFrames of current plant
+        measurements, mapped to named points in ``var_cur`` below.
+    config : accepted for platform interface compatibility; unused here.
+
+    Steps: (1) compute search bounds for each decision variable via the
+    boundary module, (2) run the evolutionary optimizer against the
+    system model, (3) re-simulate the system at the optimized setpoints.
+
+    Returns a list: [coil_2 outlet-temperature prediction, two of the
+    optimized variable frames, show_data (currently always None)].
+    """
+    
+    # Align all inputs on a fresh RangeIndex so they can later be
+    # concatenated column-wise without index mismatches.
+    args = [_.reset_index(drop=True) for _ in args]
+    
+    # Positional args -> named measurement points.
+    var_cur = {
+        'coil_1_ToutA': args[0],
+        'coil_1_HoutA': args[1],
+        'HzP'         : args[2],
+        'HzR'         : args[3],
+        'coil_1_TinW' : args[4],
+        'coil_2_TinW' : args[5],
+        'coil_3_TinW' : args[6],
+        'coil_1_Val'  : args[7],
+        'coil_2_Val'  : args[8],
+        'coil_3_Val'  : args[9],
+        'wheel_1_TinR': args[10],
+        'wheel_2_TinR': args[11],
+        'mixed_1_TinM': args[12],
+        'mixed_1_HinM': args[13],
+        'mixed_2_TinM': args[14],
+        'mixed_2_HinM': args[15],
+    }
+    # Rename each single-column frame after its point name.
+    var_cur      = {k:v.set_axis([k],axis=1) for k,v in var_cur.items()}
+    # Decision variables vs. fixed system (boundary-condition) variables.
+    var_opt_name = ['coil_2_Val','wheel_1_TinR','wheel_2_TinR']
+    var_sys_name = [_ for _ in var_cur.keys() if _ not in var_opt_name]
+    var_opt_cur  = {k:v for k,v in var_cur.items() if k in var_opt_name}
+    var_sys_cur  = {k:v for k,v in var_cur.items() if k in var_sys_name}
+    # Per-variable search bounds from the boundary module (static
+    # lower/upper offsets around the current value, 1-decimal precision).
+    var_opt_var  = {
+        'coil_2_Val': main_bound(
+            None,None,None,
+            var_opt_cur['coil_2_Val'],
+            config = sim_config_bound(
+                opt_var    = ['coil_2_Val'],
+                syn_opt    = False,
+                var_type   = True,
+                lb_static  = 100,
+                ub_static  = 100,
+                var_precis = 1
+            )
+        ),
+        'wheel_1_TinR': main_bound(
+            None,None,None,
+            var_opt_cur['wheel_1_TinR'],
+            config = sim_config_bound(
+                opt_var    = ['wheel_1_TinR'],
+                syn_opt    = False,
+                var_type   = True,
+                lb_static  = 50,
+                ub_static  = 120,
+                var_precis = 1
+            )
+        ),
+        'wheel_2_TinR': main_bound(
+            None,None,None,
+            var_opt_cur['wheel_2_TinR'],
+            config = sim_config_bound(
+                opt_var    = ['wheel_2_TinR'],
+                syn_opt    = False,
+                var_type   = True,
+                lb_static  = 50,
+                ub_static  = 120,
+                var_precis = 1
+            )
+        ),
+    }
+    
+    # Get the optimization result: minimize steam flow 'Fs' with a
+    # differential-evolution template.
+    opt_config = sim_opt_config(
+        target        = 'Fs',
+        dir_min       = True,
+        var_opt       = var_opt_name,
+        var_sys       = var_sys_name,
+        diag_model    = False,
+        algorithm     = 'soea_DE_best_1_L_templet',
+        NIND          = 5000,
+        MAXGEN        = 200,
+        constrains    = [
+            # Optimized coil_3 dew point must not exceed its current value
+            # (presumably [x] denotes the pre-optimization value — confirm
+            # against the constrains parser).
+            'coil_3_DoutA-[coil_3_DoutA]<0',
+            # 'steamcoil_1_Fs-220<0',
+            # 'steamcoil_2_Fs-100<0',
+            # '40-steamcoil_1_Fs<0',
+            # '30-steamcoil_2_Fs<0',
+        ],
+        allow_neg_opt = False,
+    )
+    
+    system = AirSystem()
+    opt_output  = main_opt(
+        *list(var_opt_var.values()),
+        *list(var_sys_cur.values()),
+        system_model = system,
+        config       = opt_config
+    )
+    # Re-run the full system model at the optimized decision variables
+    # combined with the unchanged system variables.
+    sys_output = system.predict(
+        pd.concat([*opt_output[3:],*list(var_sys_cur.values())],axis=1),
+    )
+    
+    show_data = None
+    # show_data = get_show_data(opt_res=opt_output[2])
+    # print(show_data)
+    
+    # NOTE(review): opt_output[3:][1]/[2] pick the 2nd and 3rd optimized
+    # variable frames (wheel_1_TinR / wheel_2_TinR) — confirm against
+    # main_opt's return layout.
+    return [
+        sys_output.loc[:,['coil_2_ToutA']],
+        opt_output[3:][1],
+        opt_output[3:][2],
+        show_data
+    ]
+
+
+class AirSystem(SystemModel):
+    """Thin adapter exposing the trained DHU_2 model through the
+    SystemModel interface expected by the optimizer."""
+
+    def __init__(self):
+        super().__init__()
+        # self.model = DHU_2().load_from_platform()
+        # NOTE(review): hard-coded absolute local path — replace with
+        # load_from_platform() or a configurable path before deployment.
+        path = r'C:\Users\zhangshenhao\Documents\WorkSpace\zsh_project\004_溧阳宁德\除湿机模型\model.pkl'
+        self.model = DHU_2().load(path)
+    
+    def predict(self,data:pd.DataFrame) -> pd.DataFrame:
+        """Run one system-model simulation and record its wall-clock time.
+
+        Resets the per-call penalty store, remembers the input index, and
+        appends the elapsed seconds to ``self.PAST_TIME`` (presumably
+        initialized by SystemModel — confirm in the base class).
+        """
+        time_start   = time.time()
+        self.PENALTY = {}
+        self.index   = data.index
+        sys_out      = self.model.predict_system(data)
+        time_end     = time.time()
+        past_time    = round(time_end-time_start,2)
+        self.PAST_TIME.append(past_time)
+        print(f'第{len(self.PAST_TIME)}次调用系统模型,本次调用时长为:{past_time}秒 \n')    
+        return sys_out
+
+
+def get_show_data(opt_res:pd.DataFrame) -> pd.DataFrame:
+    var_names = {
+        'coil_3_DoutA'        : '送风露点(℃)',
+        'coil_2_Q'            : '中表冷冷量(kW)',
+        'coil_3_Q'            : '后表冷冷量(kW)',
+        'steamcoil_1_Q'       : '前再生热量(kW)',
+        'steamcoil_2_Q'       : '后再生热量(kW)',
+        'wheel_1_E_diff'      : '前转轮焓升(kJ/kg)',
+        'wheel_2_E_diff'      : '后转轮焓升(kJ/kg)',
+        'summary_cost_cooling': '耗冷费用折算(元/h)',
+        'summary_cost_heating': '耗热费用折算(元/h)',
+        'summary_cost_total'  : '冷热费用合计(元/h)',
+    }
+    res = (
+        opt_res
+        .loc[list(var_names.keys()),['Before','After']]
+        .assign(指标=list(var_names.values()))
+        .rename(columns={'Before':'优化前状态','After':'优化后状态'})
+        .loc[:,['指标','优化前状态','优化后状态']]
+    )
+    return res

+ 169 - 0
main2.py

@@ -0,0 +1,169 @@
+import time
+from pprint import pprint
+
+import numpy as np
+import pandas as pd
+
+from .model.DHU_2 import DHU_2
+from ._opt.algorithm.sim_config import simulate_config as sim_opt_config
+from ._opt.algorithm.main import main as main_opt
+from ._opt.boundary.sim_config import simulate_config as sim_config_bound
+from ._opt.boundary.main import main as main_bound
+from ._opt.algorithm.model.model import SystemModel
+
+def main(*args,config=None):
+    """Optimization entry point (main2 variant).
+
+    NOTE(review): this is a near-verbatim copy of ``main`` in main.py
+    (only extra blank lines differ) — consider consolidating the two
+    files to avoid divergence. See main.py for the detailed step-by-step
+    documentation: bound computation, evolutionary optimization, and
+    re-simulation at the optimized setpoints.
+
+    Returns [coil_2 outlet-temperature prediction, two optimized
+    variable frames, show_data (currently always None)].
+    """
+    
+    # Align all inputs on a fresh RangeIndex.
+    args = [_.reset_index(drop=True) for _ in args]
+    
+    # Positional args -> named measurement points.
+    var_cur = {
+        'coil_1_ToutA': args[0],
+        'coil_1_HoutA': args[1],
+        'HzP'         : args[2],
+        'HzR'         : args[3],
+        'coil_1_TinW' : args[4],
+        'coil_2_TinW' : args[5],
+        'coil_3_TinW' : args[6],
+        'coil_1_Val'  : args[7],
+        'coil_2_Val'  : args[8],
+        'coil_3_Val'  : args[9],
+        'wheel_1_TinR': args[10],
+        'wheel_2_TinR': args[11],
+        'mixed_1_TinM': args[12],
+        'mixed_1_HinM': args[13],
+        'mixed_2_TinM': args[14],
+        'mixed_2_HinM': args[15],
+    }
+    var_cur      = {k:v.set_axis([k],axis=1) for k,v in var_cur.items()}
+    
+    
+    
+    
+    
+    
+    
+    # Decision variables vs. fixed system (boundary-condition) variables.
+    var_opt_name = ['coil_2_Val','wheel_1_TinR','wheel_2_TinR']
+    var_sys_name = [_ for _ in var_cur.keys() if _ not in var_opt_name]
+    var_opt_cur  = {k:v for k,v in var_cur.items() if k in var_opt_name}
+    var_sys_cur  = {k:v for k,v in var_cur.items() if k in var_sys_name}
+    # Per-variable search bounds from the boundary module.
+    var_opt_var  = {
+        'coil_2_Val': main_bound(
+            None,None,None,
+            var_opt_cur['coil_2_Val'],
+            config = sim_config_bound(
+                opt_var    = ['coil_2_Val'],
+                syn_opt    = False,
+                var_type   = True,
+                lb_static  = 100,
+                ub_static  = 100,
+                var_precis = 1
+            )
+        ),
+        'wheel_1_TinR': main_bound(
+            None,None,None,
+            var_opt_cur['wheel_1_TinR'],
+            config = sim_config_bound(
+                opt_var    = ['wheel_1_TinR'],
+                syn_opt    = False,
+                var_type   = True,
+                lb_static  = 50,
+                ub_static  = 120,
+                var_precis = 1
+            )
+        ),
+        'wheel_2_TinR': main_bound(
+            None,None,None,
+            var_opt_cur['wheel_2_TinR'],
+            config = sim_config_bound(
+                opt_var    = ['wheel_2_TinR'],
+                syn_opt    = False,
+                var_type   = True,
+                lb_static  = 50,
+                ub_static  = 120,
+                var_precis = 1
+            )
+        ),
+    }
+    
+    # Get the optimization result: minimize steam flow 'Fs'.
+    opt_config = sim_opt_config(
+        target        = 'Fs',
+        dir_min       = True,
+        var_opt       = var_opt_name,
+        var_sys       = var_sys_name,
+        diag_model    = False,
+        algorithm     = 'soea_DE_best_1_L_templet',
+        NIND          = 5000,
+        MAXGEN        = 200,
+        constrains    = [
+            'coil_3_DoutA-[coil_3_DoutA]<0',
+            # 'steamcoil_1_Fs-220<0',
+            # 'steamcoil_2_Fs-100<0',
+            # '40-steamcoil_1_Fs<0',
+            # '30-steamcoil_2_Fs<0',
+        ],
+        allow_neg_opt = False,
+    )
+    
+    system = AirSystem()
+    opt_output  = main_opt(
+        *list(var_opt_var.values()),
+        *list(var_sys_cur.values()),
+        system_model = system,
+        config       = opt_config
+    )
+    # Re-simulate at the optimized decision variables.
+    sys_output = system.predict(
+        pd.concat([*opt_output[3:],*list(var_sys_cur.values())],axis=1),
+    )
+    
+    show_data = None
+    # show_data = get_show_data(opt_res=opt_output[2])
+    # print(show_data)
+    
+    return [
+        sys_output.loc[:,['coil_2_ToutA']],
+        opt_output[3:][1],
+        opt_output[3:][2],
+        show_data
+    ]
+
+
+class AirSystem(SystemModel):
+    """Adapter exposing the trained DHU_2 model to the optimizer
+    (duplicate of the class of the same name in main.py)."""
+
+    def __init__(self):
+        super().__init__()
+        # self.model = DHU_2().load_from_platform()
+        # NOTE(review): hard-coded absolute local path — make configurable.
+        path = r'C:\Users\zhangshenhao\Documents\WorkSpace\zsh_project\004_溧阳宁德\除湿机模型\model.pkl'
+        self.model = DHU_2().load(path)
+    
+    def predict(self,data:pd.DataFrame) -> pd.DataFrame:
+        """Run one simulation; reset penalties and log elapsed time."""
+        time_start   = time.time()
+        self.PENALTY = {}
+        self.index   = data.index
+        sys_out      = self.model.predict_system(data)
+        time_end     = time.time()
+        past_time    = round(time_end-time_start,2)
+        self.PAST_TIME.append(past_time)
+        print(f'第{len(self.PAST_TIME)}次调用系统模型,本次调用时长为:{past_time}秒 \n')    
+        return sys_out
+
+
+def get_show_data(opt_res:pd.DataFrame) -> pd.DataFrame:
+    """Build the before/after display table for an optimization result
+    (duplicate of the function of the same name in main.py).
+
+    Selects the reporting rows from ``opt_res`` (indexed by variable
+    name), attaches Chinese display labels, renames the state columns.
+    """
+    # Variable name -> display label (with units).
+    var_names = {
+        'coil_3_DoutA'        : '送风露点(℃)',
+        'coil_2_Q'            : '中表冷冷量(kW)',
+        'coil_3_Q'            : '后表冷冷量(kW)',
+        'steamcoil_1_Q'       : '前再生热量(kW)',
+        'steamcoil_2_Q'       : '后再生热量(kW)',
+        'wheel_1_E_diff'      : '前转轮焓升(kJ/kg)',
+        'wheel_2_E_diff'      : '后转轮焓升(kJ/kg)',
+        'summary_cost_cooling': '耗冷费用折算(元/h)',
+        'summary_cost_heating': '耗热费用折算(元/h)',
+        'summary_cost_total'  : '冷热费用合计(元/h)',
+    }
+    res = (
+        opt_res
+        .loc[list(var_names.keys()),['Before','After']]
+        .assign(指标=list(var_names.values()))
+        .rename(columns={'Before':'优化前状态','After':'优化后状态'})
+        .loc[:,['指标','优化前状态','优化后状态']]
+    )
+    return res

+ 255 - 0
model/DHU_1.py

@@ -0,0 +1,255 @@
+from copy import deepcopy
+
+import numpy as np
+import pandas as pd
+
+import psychrolib
+psychrolib.SetUnitSystem(psychrolib.SI)
+get_Enthalpy = np.vectorize(psychrolib.GetMoistAirEnthalpy)
+
+from .._model._base import BaseModel
+from ..components.coil import CoolingCoil
+from ..components.wheel import WheelS2,WheelS3
+from ..components.mixed import Mixed
+
+class DHU_1(BaseModel):
+    """First-generation dehumidification unit (DHU) system model.
+
+    Chains component models (cooling coils, desiccant wheels, air mixing)
+    into one steady-state air-path simulation and derives energy/cost
+    summary quantities from the component outputs.
+    """
+    
+    
+    def __init__(self) -> None:
+        super().__init__()
+    
+    def fit(self,param):
+        """Store externally supplied component parameters as model 'AIR'.
+
+        No actual training happens; placeholder data/metrics satisfy the
+        BaseModel bookkeeping. Returns self for chaining.
+        """
+        self.record_model(
+            model_name   = 'AIR',
+            model        = param,
+            train_data   = {'x':np.array([1])},
+            train_metric = {'R2':1,'MAE':1,'MAPE':1}
+        )
+        return self
+    
+    def fit2(self,input_data:dict,observe_data:dict):
+        """Bayesian calibration sketch (PyMC): builds component priors and
+        the model graph, but defines no likelihood and does no sampling —
+        appears unfinished; observe_data is currently unused.
+        """
+        import pymc as pm
+        with pm.Model() as self.MODEL:
+            prior = {
+                'coil_1' : CoolingCoil.prior('coil_1'),
+                'coil_2' : CoolingCoil.prior('coil_2'),
+                'coil_3' : CoolingCoil.prior('coil_3'),
+                'wheel_1': WheelS2.prior('wheel_1'),
+                'wheel_2': WheelS3.prior('wheel_2'),
+            }
+            output = self.model(**input_data,engine='pymc',param=prior)
+            
+    
+    def predict(self,**kwargs) -> dict:
+        """Simulate the air path and append energy/cost quantities.
+
+        kwargs must contain the inputs of :meth:`model` plus 'COP',
+        'price_E' (electricity price) and 'price_S' (steam price), which
+        are popped before the physical simulation. Returns the nested
+        per-component result dict augmented with loads and costs.
+        """
+        param    = self.model_info['model_AIR']
+        kwargs   = deepcopy(kwargs)
+        COP      = kwargs.pop('COP')
+        price_E  = kwargs.pop('price_E')
+        price_S  = kwargs.pop('price_S')
+        pred_res = self.model(**kwargs,engine='numpy',param=param)
+        
+        # Middle cooling coil load, kW: enthalpy difference times mass
+        # flow. The 66000 factor presumably converts the normalized flow
+        # ratio to kg/h, and 3.6e6 converts J/h to kW — TODO confirm units.
+        coil_2_Ein  = get_Enthalpy(pred_res['mixed_1']['ToutA'],pred_res['mixed_1']['HoutA'])
+        coil_2_Eout = get_Enthalpy(pred_res['coil_2']['ToutA'],pred_res['coil_2']['HoutA'])
+        pred_res['coil_2']['Q'] = (coil_2_Eout - coil_2_Ein) * (pred_res['F_air']['FF_air']*66000) / (3.6*10**6)
+        
+        # Rear cooling coil load, kW.
+        coil_3_Ein = get_Enthalpy(pred_res['wheel_2']['ToutP'],pred_res['wheel_2']['HoutP'])
+        coil_3_Eout = get_Enthalpy(pred_res['coil_3']['ToutA'],pred_res['coil_3']['HoutA'])
+        pred_res['coil_3']['Q'] = (coil_3_Eout - coil_3_Ein) * (pred_res['F_air']['FR_air']*66000) / (3.6*10**6)
+        
+        # Front regeneration heat, kW (heating air up to wheel_1_TinR at
+        # constant humidity).
+        steamcoil_1_Ein  = get_Enthalpy(pred_res['mixed_2']['ToutA'],pred_res['mixed_2']['HoutA'])
+        steamcoil_1_Eout = get_Enthalpy(kwargs['wheel_1_TinR'],pred_res['mixed_2']['HoutA'])
+        pred_res['steamcoil_1']['Q'] = (steamcoil_1_Eout - steamcoil_1_Ein) * (pred_res['F_air']['FO_air']*66000) / (3.6*10**6)
+        
+        # Rear regeneration heat, kW.
+        steamcoil_2_Ein  = get_Enthalpy(pred_res['wheel_2']['ToutC'],pred_res['wheel_2']['HoutC'])
+        steamcoil_2_Eout = get_Enthalpy(kwargs['wheel_2_TinR'],pred_res['wheel_2']['HoutC'])
+        pred_res['steamcoil_2']['Q'] = (steamcoil_2_Eout - steamcoil_2_Ein) * ((pred_res['F_air']['FO_air']-pred_res['F_air']['FS_air'])*66000) / (3.6*10**6)
+        
+        # Front wheel enthalpy rise across the process side, kJ/kg.
+        wheel_1_EinP  = get_Enthalpy(pred_res['coil_1']['ToutA'],pred_res['coil_1']['HoutA'])
+        wheel_1_EoutP = get_Enthalpy(pred_res['wheel_1']['ToutP'],pred_res['wheel_1']['HoutP'])
+        pred_res['wheel_1']['E_diff'] = (wheel_1_EoutP - wheel_1_EinP) / 1000
+        
+        # Rear wheel enthalpy rise across the process side, kJ/kg.
+        wheel_2_EinP  = get_Enthalpy(pred_res['coil_2']['ToutA'],pred_res['coil_2']['HoutA'])
+        wheel_2_EoutP = get_Enthalpy(pred_res['wheel_2']['ToutP'],pred_res['wheel_2']['HoutP'])
+        pred_res['wheel_2']['E_diff'] = (wheel_2_EoutP - wheel_2_EinP) / 1000
+        
+        # Cooling cost (currency/h): electrical input via COP times price.
+        pred_res['summary']['cost_cooling'] = np.abs(pred_res['coil_2']['Q'] + pred_res['coil_3']['Q']) / COP * price_E
+        
+        # Heating cost (currency/h): kW -> kJ/h, divided by 2260 kJ/kg
+        # (latent heat of steam, presumably) -> t/h, times steam price.
+        pred_res['summary']['cost_heating'] = (pred_res['steamcoil_1']['Q'] + pred_res['steamcoil_2']['Q']) *3600/2260/1000*price_S
+        
+        # Combined cooling + heating cost.
+        pred_res['summary']['cost_total'] = pred_res['summary']['cost_cooling'] + pred_res['summary']['cost_heating']
+        
+        return pred_res
+    
+    def predict_system(self,**kwargs) -> pd.DataFrame:
+        """Flatten the nested predict() output into one wide DataFrame.
+
+        Column names are '<equipment>_<output>'; scalar outputs are
+        broadcast against array outputs so all columns share one length.
+        """
+        pred_res = self.predict(**kwargs)
+        system_output = {}
+        for equp_name,output_info in pred_res.items():
+            for output_name,output_value in output_info.items():
+                system_output[f'{equp_name}_{output_name}'] = output_value
+        system_output = dict(zip(
+            system_output.keys(),
+            np.broadcast_arrays(*system_output.values())
+        ))
+        system_output = pd.DataFrame(system_output)
+        return system_output
+        
+    
+    @classmethod
+    def model(
+        cls,
+        Tin_F,        # fresh-air temperature
+        Hin_F,        # fresh-air humidity
+        HzP,          # process-side fan frequency (currently unused in body)
+        HzR,          # regeneration-side fan frequency (currently unused in body)
+        coil_1_TinW,  # front coil inlet water temperature
+        coil_2_TinW,  # middle coil inlet water temperature
+        coil_3_TinW,  # rear coil inlet water temperature
+        coil_1_Val,   # front coil valve opening
+        coil_2_Val,   # middle coil valve opening
+        coil_3_Val,   # rear coil valve opening
+        wheel_1_TinR, # front wheel regeneration-side temperature
+        wheel_2_TinR, # rear wheel regeneration-side temperature
+        mixed_1_TinM, # return-air temperature (process side)
+        mixed_1_HinM, # return-air humidity (process side)
+        mixed_2_TinM, # makeup-air temperature (regeneration side)
+        mixed_2_HinM, # makeup-air humidity (regeneration side)
+        engine,
+        param
+    ) ->  dict:
+        """Steady-state simulation of the full air path.
+
+        Components are evaluated in flow order; the front wheel is
+        re-evaluated at the end with the actual regeneration-side
+        humidity (humidity-correction pass). 'engine' selects the math
+        backend ('numpy' or 'pymc') passed through to each component.
+        Returns a dict of per-component output dicts plus air-flow info.
+        """
+        # Air mass flows (normalized; fresh air = 1).
+        FF_air = 1                 # fresh air
+        FB_air = 0                 # return air
+        FR_air = 0.74              # supply air
+        FS_air = 0.2               # makeup air
+        FO_air = FF_air + FB_air + FS_air - FR_air # exhaust air (mass balance)
+        
+        # Water mass flows: valve opening used directly as flow proxy.
+        coil_1_FW = coil_1_Val
+        coil_2_FW = coil_2_Val
+        coil_3_FW = coil_3_Val
+        
+        
+        # Front cooling coil
+        coil_1_res = CoolingCoil.model(
+            TinA   = Tin_F,
+            HinA   = Hin_F,
+            FA     = FF_air,
+            TinW   = coil_1_TinW,
+            FW     = coil_1_FW,
+            engine = engine,
+            param  = param['coil_1']
+        )
+        
+        # Front desiccant wheel (regeneration humidity initially 0; see
+        # correction pass below)
+        wheel_1_res = WheelS2.model(
+            TinP   = coil_1_res['ToutA'],
+            HinP   = coil_1_res['HoutA'],
+            FP     = FF_air,
+            TinR   = wheel_1_TinR,
+            HinR   = 0,
+            FR     = FO_air,
+            engine = engine,
+            param  = param['wheel_1']
+        )
+        
+        # Process-side mixing (return air)
+        mixed_1_res = Mixed.model(
+            TinA   = wheel_1_res['ToutP'],
+            HinA   = wheel_1_res['HoutP'],
+            FA     = FF_air,
+            TinM   = mixed_1_TinM,
+            HinM   = mixed_1_HinM,
+            FM     = FB_air,
+            engine = engine
+        )
+        
+        # Middle cooling coil
+        coil_2_res = CoolingCoil.model(
+            TinA   = mixed_1_res['ToutA'],
+            HinA   = mixed_1_res['HoutA'],
+            FA     = FR_air,
+            TinW   = coil_2_TinW,
+            FW     = coil_2_FW,
+            engine = engine,
+            param  = param['coil_2']
+        )
+        
+        
+        # Rear desiccant wheel (three streams: process, cooling, regen)
+        wheel_2_res = WheelS3.model(
+            TinP   = coil_2_res['ToutA'],
+            HinP   = coil_2_res['HoutA'],
+            FP     = FR_air,
+            TinC   = wheel_1_res['ToutP'],
+            HinC   = wheel_1_res['HoutP'],
+            FC     = FO_air-FS_air,
+            TinR   = wheel_2_TinR,
+            HinR   = wheel_1_res['HoutP'],
+            FR     = FO_air-FS_air,
+            engine = engine,
+            param  = param['wheel_2'],
+        )
+        
+        # Rear cooling coil
+        coil_3_res = CoolingCoil.model(
+            TinA   = wheel_2_res['ToutP'],
+            HinA   = wheel_2_res['HoutP'],
+            FA     = FR_air,
+            TinW   = coil_3_TinW,
+            FW     = coil_3_FW,
+            engine = engine,
+            param  = param['coil_3']
+        )
+        
+        # Regeneration-side mixing (exhaust air)
+        mixed_2_res = Mixed.model(
+            TinA   = wheel_2_res['ToutR'],
+            HinA   = wheel_2_res['HoutR'],
+            FA     = FO_air-FS_air,
+            TinM   = mixed_2_TinM,
+            HinM   = mixed_2_HinM,
+            FM     = FS_air,
+            engine = engine
+        )
+        
+        # Front wheel humidity-correction pass: re-evaluate with the real
+        # regeneration-side humidity from the exhaust mix. Note the first
+        # wheel_1_res still feeds the components computed above.
+        wheel_1_res_adj = WheelS2.model(
+            TinP   = coil_1_res['ToutA'],
+            HinP   = coil_1_res['HoutA'],
+            FP     = FF_air,
+            TinR   = wheel_1_TinR,
+            HinR   = mixed_2_res['HoutA'],
+            FR     = FO_air,
+            engine = engine,
+            param  = param['wheel_1']
+        )
+        
+        return {
+            'coil_1'     : coil_1_res,
+            'coil_2'     : coil_2_res,
+            'coil_3'     : coil_3_res,
+            'wheel_1'    : wheel_1_res_adj,
+            'wheel_2'    : wheel_2_res,
+            'mixed_1'    : mixed_1_res,
+            'mixed_2'    : mixed_2_res,
+            # Steam-coil quantities are filled in later by predict().
+            'steamcoil_1': {},
+            'steamcoil_2': {},
+            'F_air'      : {
+                'FF_air': FF_air,
+                'FB_air': FB_air,
+                'FR_air': FR_air,
+                'FS_air': FS_air,
+                'FO_air': FO_air
+            },
+            'summary':{}
+        }
+        

+ 356 - 0
model/DHU_2.py

@@ -0,0 +1,356 @@
+from copy import deepcopy
+
+import numpy as np
+import pandas as pd
+import pymc as pm
+try:
+    import plotnine as gg
+except:
+    pass
+
+import psychrolib
+psychrolib.SetUnitSystem(psychrolib.SI)
+get_Enthalpy = np.vectorize(psychrolib.GetMoistAirEnthalpy)
+
+from .._model._base import BaseModel
+from ..components.coil import CoolingCoil,SteamCoilFs,SteamCoilFs2
+from ..components.wheel import WheelS3
+from ..components.mixed import Mixed
+
def record(name,var):
    """Register *var* in the ambient PyMC model as a Deterministic node '<name>_mu'."""
    pm.Deterministic(name + '_mu', var)
+
def observe(name,var,observed,sigma=1):
    """Attach a Normal likelihood tying model output *var* to measured *observed*.

    Creates three nodes in the ambient PyMC model: '<name>_mu' (Deterministic
    copy of *var*), '<name>_sigma' (HalfNormal prior on the noise scale, scale
    given by *sigma*) and the observed Normal node '<name>'.
    """
    mu_node  = pm.Deterministic(name + '_mu', var)
    noise_sd = pm.HalfNormal(name + '_sigma', sigma=sigma)
    pm.Normal(name, mu=mu_node, sigma=noise_sd, observed=observed)
+
+
+class DHU_2(BaseModel):
+    """Two-wheel dehumidification unit (DHU) system model, variant 2.
+
+    Chains component models (WheelS3 wheels, CoolingCoil coils, Mixed mixing
+    sections, steam coils) into one system. Air mass flows are treated as
+    fitted constants (TruncatedNormal priors). Parameters are estimated by a
+    PyMC MAP point estimate in :meth:`fit`; :meth:`predict` re-runs the same
+    graph with numpy.
+    """
+    
+    def __init__(self) -> None:
+        super().__init__()
+    
+    def fit(
+        self,
+        input_data   : pd.DataFrame,
+        observed_data: pd.DataFrame,
+        plot_TVP     : bool = True
+    ):
+        """Fit component parameters by MAP on the full system model.
+
+        :param input_data: per-timestep boundary conditions (coil/wheel/mixed
+            sensor columns referenced below by name)
+        :param observed_data: measured outputs used as likelihood observations
+        :param plot_TVP: if True, draw a predicted-vs-real scatter per
+            observed variable (requires plotnine)
+        :return: self, with fitted parameters stored via ``record_model``
+        """
+        with pm.Model() as self.MODEL_PYMC:
+            param_prior = {
+                'wheel_1'    : WheelS3.prior('wheel_1'),
+                'wheel_2'    : WheelS3.prior('wheel_2'),
+                'coil_2'     : CoolingCoil.prior('coil_2'),
+                'coil_3'     : CoolingCoil.prior('coil_3'),
+                'steamcoil_1': SteamCoilFs2.prior('steamcoil_1'),
+                'steamcoil_2': SteamCoilFs.prior('steamcoil_2'),
+                'F_air'      : {
+                    'F_air_F': 12,                                                                # fresh air (fixed constant, not fitted)
+                    'F_air_B': pm.TruncatedNormal('F_air_B',mu=25,sigma=1,initval=25,lower=0.1),  # return air
+                    'F_air_R': pm.TruncatedNormal('F_air_R',mu=36,sigma=1,initval=36,lower=0.1),  # supply air
+                    'F_air_S': pm.TruncatedNormal('F_air_S',mu=3,sigma=1,initval=3,lower=0.1),    # make-up air
+                }
+            }
+            res = DHU_2.model(
+                Tin_F        = input_data.coil_1_ToutA.values,
+                Hin_F        = input_data.coil_1_HoutA.values,
+                HzP          = None,
+                HzR          = None,
+                FF_air       = param_prior['F_air']['F_air_F'],
+                FB_air       = param_prior['F_air']['F_air_B'],
+                FR_air       = param_prior['F_air']['F_air_R'],
+                FS_air       = param_prior['F_air']['F_air_S'],
+                coil_1_TinW  = input_data.coil_1_TinW.values,
+                coil_2_TinW  = input_data.coil_2_TinW.values,
+                coil_3_TinW  = input_data.coil_3_TinW.values,
+                coil_1_Val   = input_data.coil_1_Val.values / 100,
+                coil_2_Val   = input_data.coil_2_Val.values / 100,
+                coil_3_Val   = input_data.coil_3_Val.values / 100,
+                wheel_1_TinR = input_data.wheel_1_TinR.values,
+                wheel_2_TinR = input_data.wheel_2_TinR.values,
+                mixed_1_TinM = input_data.mixed_1_TinM.values,
+                mixed_2_TinM = input_data.mixed_2_TinM.values,
+                mixed_1_HinM = input_data.mixed_1_HinM.values,
+                mixed_2_HinM = input_data.mixed_2_HinM.values,
+                engine       = 'pymc',
+                param        = param_prior
+            )
+            # Likelihoods on the measured outlet states (wider sigma for flows).
+            observe('mixed_1_ToutA',res['mixed_1']['ToutA'],observed=observed_data.mixed_1_ToutA.values)
+            observe('mixed_1_DoutA',res['mixed_1']['DoutA'],observed=observed_data.mixed_1_DoutA.values)
+            observe('wheel_1_ToutC',res['wheel_1']['ToutC'],observed=observed_data.wheel_1_ToutC.values)
+            observe('coil_2_ToutA',res['coil_2']['ToutA'],observed=observed_data.coil_2_ToutA.values)
+            observe('coil_2_DoutA',res['coil_2']['DoutA'],observed=observed_data.coil_2_DoutA.values)
+            observe('wheel_2_ToutP',res['wheel_2']['ToutP'],observed=observed_data.wheel_2_ToutP.values)
+            observe('wheel_2_DoutP',res['wheel_2']['DoutP'],observed=observed_data.wheel_2_DoutP.values)
+            observe('wheel_2_ToutR',res['wheel_2']['ToutR'],observed=observed_data.wheel_2_ToutR.values)
+            observe('steamcoil_1_FP',res['steamcoil_1']['FP'],observed=observed_data.steamcoil_1_FP.values,sigma=1000)
+            observe('steamcoil_1_Fs',res['steamcoil_1']['Fs'],observed=observed_data.steamcoil_1_Fs.values,sigma=20)
+            observe('steamcoil_2_Fs',res['steamcoil_2']['Fs'],observed=observed_data.steamcoil_2_Fs.values,sigma=20)
+            
+            record('wheel_2_ToutC',res['wheel_2']['ToutC'])
+            record('mixed_2_ToutA',res['mixed_2']['ToutA'])
+            
+            # MAP point estimate instead of full posterior sampling.
+            param_posterior = pm.find_MAP(maxeval=50000)
+            # Regroup flat 'equip_param' MAP names into nested {equip: {param: value}};
+            # the fixed fresh-air flow is re-inserted by hand (it was never a free RV).
+            param_posterior_reorder = {'F_air':{'F':12}}
+            for equp_name in param_prior.keys():
+                param_posterior_reorder.setdefault(equp_name,{})
+                for param_name,param_value in param_posterior.items():
+                    if '__' in param_name:
+                        continue
+                    if param_name.startswith(equp_name):
+                        param_name_adj = param_name.replace(f'{equp_name}_','')
+                        param_posterior_reorder[equp_name][param_name_adj] = param_value
+            
+            if plot_TVP:
+                # In-sample predicted ('*_mu') vs measured scatter, one facet per variable.
+                TVP_data = []
+                for param_name in param_posterior.keys():
+                    if param_name.replace('_mu','') not in observed_data.columns:
+                        continue
+                    TVP_data.append(
+                        pd.DataFrame(
+                            {
+                                'param_name': param_name.replace('_mu',''),
+                                'real'      : observed_data.loc[:,param_name.replace('_mu','')].values,
+                                'pred'      : param_posterior[param_name]
+                            }
+                        )
+                    )
+                gg.options.figure_size = (10,10)
+                plot = (
+                    pd.concat(TVP_data,axis=0)
+                    .pipe(gg.ggplot)
+                    + gg.aes(x='real',y='pred')
+                    + gg.geom_point()
+                    + gg.facet_wrap(facets='param_name',scales='free')
+                    + gg.geom_abline(intercept=0,slope=1,color='red')
+                )
+                plot.show()
+            
+            # NOTE(review): train_data/train_metric look like placeholders — confirm
+            # whether real training metrics are expected downstream.
+            self.record_model(
+                model_name   = 'DHU',
+                model        = param_posterior_reorder,
+                train_data   = {'x':np.array([1])},
+                train_metric = {'R2':1,'MAE':1,'MAPE':1}
+            )
+            return self
+            
+    def predict(self,input_data:pd.DataFrame) -> dict:
+        """Run the fitted system model numerically (numpy engine).
+
+        :param input_data: same column layout as in :meth:`fit`
+        :return: nested dict of per-component outputs, see :meth:`model`
+        """
+        param_posterior = self.model_info['model_DHU']
+        res = DHU_2.model(
+            Tin_F        = input_data.coil_1_ToutA.values,
+            Hin_F        = input_data.coil_1_HoutA.values,
+            HzP          = None,
+            HzR          = None,
+            FF_air       = param_posterior['F_air']['F'],
+            FB_air       = param_posterior['F_air']['B'],
+            FR_air       = param_posterior['F_air']['R'],
+            FS_air       = param_posterior['F_air']['S'],
+            coil_1_TinW  = input_data.coil_1_TinW.values,
+            coil_2_TinW  = input_data.coil_2_TinW.values,
+            coil_3_TinW  = input_data.coil_3_TinW.values,
+            coil_1_Val   = input_data.coil_1_Val.values / 100,
+            coil_2_Val   = input_data.coil_2_Val.values / 100,
+            coil_3_Val   = input_data.coil_3_Val.values / 100,
+            wheel_1_TinR = input_data.wheel_1_TinR.values,
+            wheel_2_TinR = input_data.wheel_2_TinR.values,
+            mixed_1_TinM = input_data.mixed_1_TinM.values,
+            mixed_2_TinM = input_data.mixed_2_TinM.values,
+            mixed_1_HinM = input_data.mixed_1_HinM.values,
+            mixed_2_HinM = input_data.mixed_2_HinM.values,
+            engine       = 'numpy',
+            param        = param_posterior
+        )
+        return res
+    
+    def predict_system(self,input_data:pd.DataFrame) -> pd.DataFrame:
+        """Flatten :meth:`predict` output into one DataFrame.
+
+        Columns are '<component>_<output>'; adds total steam flow 'Fs' as the
+        sum of both steam coils.
+        """
+        pred_res = self.predict(input_data)
+        system_output = {}
+        for equp_name,output_info in pred_res.items():
+            for output_name,output_value in output_info.items():
+                system_output[f'{equp_name}_{output_name}'] = output_value
+        system_output       = pd.DataFrame(system_output)
+        system_output['Fs'] = system_output.steamcoil_1_Fs + system_output.steamcoil_2_Fs
+        return system_output
+        
+    
+    @classmethod
+    def model(
+        cls,
+        Tin_F,        # temperature after the front cooling coil
+        Hin_F,        # humidity after the front cooling coil
+        HzP,          # process-side fan frequency (unused in this variant)
+        HzR,          # regeneration-side fan frequency (unused in this variant)
+        FF_air,       # fresh-air mass flow
+        FB_air,       # return-air mass flow
+        FR_air,       # supply-air mass flow
+        FS_air,       # make-up-air mass flow
+        coil_1_TinW,  # front coil inlet water temperature (unused in this variant)
+        coil_2_TinW,  # middle coil inlet water temperature
+        coil_3_TinW,  # rear coil inlet water temperature
+        coil_1_Val,   # front coil valve opening (unused in this variant)
+        coil_2_Val,   # middle coil valve opening
+        coil_3_Val,   # rear coil valve opening
+        wheel_1_TinR, # front wheel regeneration-side inlet temperature
+        wheel_2_TinR, # rear wheel regeneration-side inlet temperature
+        mixed_1_TinM, # return-air temperature (process side)
+        mixed_1_HinM, # return-air humidity (process side)
+        mixed_2_TinM, # make-up-air temperature (regeneration side)
+        mixed_2_HinM, # make-up-air humidity (regeneration side)
+        engine,       # 'pymc' (symbolic graph) or 'numpy' (numeric)
+        param         # nested per-component parameter dict
+    ) ->  dict:
+        """Static system graph: wheels -> mixing -> coils -> steam coils.
+
+        Regeneration-side humidities are unknown on the first pass, so each
+        wheel is first evaluated with HinR=0 and then re-evaluated ('_adj')
+        once the downstream humidity is available.
+        """
+        # Air mass flows.
+        # FF_air = 12                 # fresh air
+        # FB_air = pm.TruncatedNormal('FB_air',mu=25,sigma=1,initval=25,lower=0.1)              # return air
+        # FR_air = pm.TruncatedNormal('FR_air',mu=36,sigma=1,initval=36,lower=0.1)              # supply air
+        # FS_air = pm.TruncatedNormal('FS_air',mu=3,sigma=1,initval=3,lower=0.1)               # make-up air
+        FO_air = FF_air + FB_air + FS_air - FR_air # exhaust air (mass balance)
+        
+        # Water mass flows: valve opening used directly as (normalised) flow.
+        coil_2_FW = coil_2_Val
+        coil_3_FW = coil_3_Val
+        
+        
+        # Front wheel (first pass: regeneration inlet humidity unknown, HinR=0).
+        wheel_1_res = WheelS3.model(
+            TinP   = Tin_F,
+            HinP   = Hin_F,
+            FP     = FR_air - FB_air,
+            TinR   = wheel_1_TinR,
+            HinR   = 0,
+            FR     = FO_air,
+            TinC   = Tin_F,
+            HinC   = Hin_F,
+            FC     = FF_air+FB_air-FR_air,
+            engine = engine,
+            param  = param['wheel_1']
+        )
+        
+        # Process-side mixing (return air).
+        mixed_1_res = Mixed.model(
+            TinA   = wheel_1_res['ToutP'],
+            HinA   = wheel_1_res['HoutP'],
+            FA     = FR_air - FB_air,
+            TinM   = mixed_1_TinM,
+            HinM   = mixed_1_HinM,
+            FM     = FB_air,
+            engine = engine
+        )
+        
+        # Middle cooling coil.
+        coil_2_res = CoolingCoil.model(
+            TinA   = mixed_1_res['ToutA'],
+            HinA   = mixed_1_res['HoutA'],
+            FA     = FR_air,
+            TinW   = coil_2_TinW,
+            FW     = coil_2_FW,
+            engine = engine,
+            param  = param['coil_2']
+        )
+        
+        # Rear wheel (first pass, HinR=0).
+        wheel_2_res = WheelS3.model(
+            TinP   = coil_2_res['ToutA'],
+            HinP   = coil_2_res['HoutA'],
+            FP     = FR_air,
+            TinC   = wheel_1_res['ToutC'],
+            HinC   = wheel_1_res['HoutC'],
+            FC     = FF_air+FB_air-FR_air,
+            TinR   = wheel_2_TinR,
+            HinR   = 0,
+            FR     = FO_air-FS_air,
+            engine = engine,
+            param  = param['wheel_2'],
+        )
+        
+        # Rear cooling coil.
+        coil_3_res = CoolingCoil.model(
+            TinA   = wheel_2_res['ToutP'],
+            HinA   = wheel_2_res['HoutP'],
+            FA     = FR_air,
+            TinW   = coil_3_TinW,
+            FW     = coil_3_FW,
+            engine = engine,
+            param  = param['coil_3']
+        )
+        
+        # Rear wheel humidity correction (second pass with HinR from first pass).
+        wheel_2_res_adj = WheelS3.model(
+            TinP   = coil_2_res['ToutA'],
+            HinP   = coil_2_res['HoutA'],
+            FP     = FR_air,
+            TinC   = wheel_1_res['ToutC'],
+            HinC   = wheel_1_res['HoutC'],
+            FC     = FF_air+FB_air-FR_air,
+            TinR   = wheel_2_TinR,
+            HinR   = wheel_2_res['HoutC'],
+            FR     = FO_air-FS_air,
+            engine = engine,
+            param  = param['wheel_2'],
+        )
+
+        # Regeneration-side mixing (exhaust).
+        mixed_2_res = Mixed.model(
+            TinA   = wheel_2_res_adj['ToutR'],
+            HinA   = wheel_2_res_adj['HoutR'],
+            FA     = FO_air-FS_air,
+            TinM   = mixed_2_TinM,
+            HinM   = mixed_2_HinM,
+            FM     = FS_air,
+            engine = engine
+        )
+        
+        # Front wheel humidity correction (second pass with HinR from mixing).
+        wheel_1_res_adj = WheelS3.model(
+            TinP   = Tin_F,
+            HinP   = Hin_F,
+            FP     = FR_air - FB_air,
+            TinR   = wheel_1_TinR,
+            HinR   = mixed_2_res['HoutA'],
+            FR     = FO_air,
+            TinC   = Tin_F,
+            HinC   = Hin_F,
+            FC     = FF_air+FB_air-FR_air,
+            engine = engine,
+            param  = param['wheel_1']
+        )
+        
+        # Front steam coil.
+        steamcoil_1_res = SteamCoilFs2.model(
+            TinA   = mixed_2_res['ToutA'],
+            ToutA  = wheel_1_TinR,
+            FA     = FO_air,
+            param  = param['steamcoil_1'],
+            engine = engine
+        )
+        
+        # Rear steam coil.
+        steamcoil_2_res = SteamCoilFs.model(
+            TinA   = wheel_2_res_adj['ToutC'],
+            ToutA  = wheel_2_TinR,
+            FA     = FO_air-FS_air,
+            param  = param['steamcoil_2'],
+            engine = engine
+        )
+        
+        
+        return {
+            'coil_2'     : coil_2_res,
+            'coil_3'     : coil_3_res,
+            'wheel_1'    : wheel_1_res_adj,
+            'wheel_2'    : wheel_2_res_adj,
+            'mixed_1'    : mixed_1_res,
+            'mixed_2'    : mixed_2_res,
+            'steamcoil_1': steamcoil_1_res,
+            'steamcoil_2': steamcoil_2_res,
+            'F_air'      : {
+                'FF_air': FF_air,
+                'FB_air': FB_air,
+                'FR_air': FR_air,
+                'FS_air': FS_air,
+                'FO_air': FO_air
+            },
+            'summary':{}
+        }
+        

+ 422 - 0
model/DHU_3.py

@@ -0,0 +1,422 @@
+import numpy as np
+import pandas as pd
+import pymc as pm
+import pytensor.tensor as pt
+from sklearn.metrics import r2_score,mean_absolute_error,mean_absolute_percentage_error
+try:
+    import plotnine as gg
+except:
+    pass
+
+from .._model._base import BaseModel
+from ..components.coil import CoolingCoil2,SteamCoilFs,SteamCoilFs2,SteamCoilFs3
+from ..components.wheel import WheelS3
+from ..components.mixed import Mixed
+
def record(name,var):
    """Register *var* in the ambient PyMC model as a Deterministic node '<name>_mu'."""
    pm.Deterministic(name + '_mu', var)
+
def observe(name,var,observed,sigma=1):
    """Attach a Normal likelihood tying model output *var* to measured *observed*.

    Creates '<name>_mu' (Deterministic copy of *var*), '<name>_sigma'
    (HalfNormal noise-scale prior with scale *sigma*) and the observed
    Normal node '<name>' in the ambient PyMC model.
    """
    mu_node  = pm.Deterministic(name + '_mu', var)
    noise_sd = pm.HalfNormal(name + '_sigma', sigma=sigma)
    pm.Normal(name, mu=mu_node, sigma=noise_sd, observed=observed)
+
+
+class DHU_3(BaseModel):
+    """Two-wheel dehumidification unit (DHU) system model, variant 3.
+
+    Unlike DHU_2, air mass flows are derived from fan frequencies (HzP/HzR)
+    through fitted linear coefficients, optionally with a piecewise-constant
+    Gaussian random walk to capture drift. Parameters are estimated by a
+    PyMC MAP estimate in :meth:`fit`; :meth:`predict` re-runs the same
+    graph with numpy.
+    """
+    
+    def __init__(self) -> None:
+        super().__init__()
+    
+    def fit(
+        self,
+        input_data   : pd.DataFrame,
+        observed_data: pd.DataFrame,
+        rw_FA_val    : bool = False,
+        plot_TVP     : bool = True
+    ):
+        """Fit component parameters by MAP on the full system model.
+
+        :param input_data: per-timestep boundary conditions (sensor columns)
+        :param observed_data: measured outputs used as likelihood observations
+        :param rw_FA_val: if True, add a segment-wise Gaussian random walk to
+            the base air-flow split (drift term)
+        :param plot_TVP: if True, draw a predicted-vs-real scatter per
+            observed variable (requires plotnine)
+        :return: self; also populates ``self.param_posterior``,
+            ``self.TVP_data`` and ``self.TVP_metric``
+        """
+        with pm.Model() as self.MODEL_PYMC:
+            param_prior = {
+                'wheel_1'    : WheelS3.prior('wheel_1'),
+                'wheel_2'    : WheelS3.prior('wheel_2'),
+                'coil_2'     : CoolingCoil2.prior('coil_2'),
+                'coil_3'     : CoolingCoil2.prior('coil_3'),
+                'steamcoil_1': SteamCoilFs2.prior('steamcoil_1'),
+                'steamcoil_2': SteamCoilFs.prior('steamcoil_2'),
+                'F_air'      : {
+                    # Fan-frequency coefficients and base offsets of the
+                    # air-flow model (see DHU_3.model for how they combine).
+                    'HzP_X' : pm.HalfNormal('F_air_HzP_X',sigma=1,initval=1),
+                    'HzP_H' : pm.HalfNormal('F_air_HzP_H',sigma=1,initval=0.1),
+                    'HzR_B' : pm.HalfNormal('F_air_HzR_B',sigma=1,initval=0.5),
+                    'X_base': pm.TruncatedNormal('F_air_X_base',mu=0.5,sigma=0.2,lower=0,initval=0.5),
+                    'H_base': pm.TruncatedNormal('F_air_H_base',mu=0.6,sigma=0.2,lower=0,upper=0.999,initval=0.6),
+                    'B_base': pm.TruncatedNormal('F_air_B_base',mu=0.2,sigma=0.1,lower=0,initval=0.1),
+                },
+                'mixed_1':{},
+                'mixed_2':{}
+            }
+            
+            if rw_FA_val:
+                # Piecewise-constant random walk: one value per ~30-sample
+                # segment, repeated to full length, modelling drift in the
+                # base flow split.
+                N          = len(input_data)
+                period     = 30
+                n_segments = int(np.ceil(N/period))
+                remainder  = N % period
+                repeat     = [period] * (n_segments - 1) + ([remainder] if remainder != 0 else [])
+                rw     = pm.GaussianRandomWalk(
+                    'rw',sigma=0.1,init_dist=pm.Normal.dist(mu=0,sigma=0.3),shape=n_segments)
+                param_prior['F_air']['val_rw']  = pm.Deterministic('F_air_val_rw',pt.repeat(rw,repeat))
+                param_prior['F_air']['val_pct'] = pm.Beta('F_air_val_pct',alpha=8,beta=1,initval=0.9)
+            
+            res = DHU_3.model(
+                Tin_F        = input_data.coil_1_ToutA.values,
+                Hin_F        = input_data.coil_1_HoutA.values,
+                HzP          = input_data.HzP.values,
+                HzR          = input_data.HzR.values,
+                coil_1_TinW  = input_data.coil_1_TinW.values,
+                coil_2_TinW  = input_data.coil_2_TinW.values,
+                coil_3_TinW  = input_data.coil_3_TinW.values,
+                coil_1_Val   = input_data.coil_1_Val.values,
+                coil_2_Val   = input_data.coil_2_Val.values,
+                coil_3_Val   = input_data.coil_3_Val.values,
+                wheel_1_TinR = input_data.wheel_1_TinR.values,
+                wheel_2_TinR = input_data.wheel_2_TinR.values,
+                mixed_1_TinM = input_data.mixed_1_TinM.values,
+                mixed_2_TinM = input_data.mixed_2_TinM.values,
+                mixed_1_HinM = input_data.mixed_1_HinM.values,
+                mixed_2_HinM = input_data.mixed_2_HinM.values,
+                engine       = 'pymc',
+                param        = param_prior
+            )
+            # Likelihoods on the measured outlet states (wider sigma for flows).
+            observe('mixed_1_ToutA',res['mixed_1']['ToutA'],observed=observed_data.mixed_1_ToutA.values)
+            observe('mixed_1_DoutA',res['mixed_1']['DoutA'],observed=observed_data.mixed_1_DoutA.values)
+            observe('wheel_1_ToutC',res['wheel_1']['ToutC'],observed=observed_data.wheel_1_ToutC.values)
+            observe('coil_2_ToutA',res['coil_2']['ToutA'],observed=observed_data.coil_2_ToutA.values)
+            observe('coil_2_DoutA',res['coil_2']['DoutA'],observed=observed_data.coil_2_DoutA.values)
+            observe('wheel_2_ToutP',res['wheel_2']['ToutP'],observed=observed_data.wheel_2_ToutP.values)
+            observe('wheel_2_DoutP',res['wheel_2']['DoutP'],observed=observed_data.wheel_2_DoutP.values)
+            observe('wheel_2_ToutR',res['wheel_2']['ToutR'],observed=observed_data.wheel_2_ToutR.values)
+            observe('steamcoil_1_FP',res['steamcoil_1']['FP'],observed=observed_data.steamcoil_1_FP.values,sigma=1000)
+            observe('steamcoil_1_Fs',res['steamcoil_1']['Fs'],observed=observed_data.steamcoil_1_Fs.values,sigma=20)
+            observe('steamcoil_2_Fs',res['steamcoil_2']['Fs'],observed=observed_data.steamcoil_2_Fs.values,sigma=20)
+            
+            record('wheel_2_ToutC',res['wheel_2']['ToutC'])
+            record('mixed_2_ToutA',res['mixed_2']['ToutA'])
+            record('wheel_1_FaP',res['wheel_1']['FP'])
+            record('wheel_1_FaR',res['wheel_1']['FR'])
+            record('wheel_1_FaC',res['wheel_1']['FC'])
+            record('mixed_1_FaA',res['mixed_1']['FA'])
+            record('mixed_1_FaM',res['mixed_1']['FM'])
+            record('F_air_S',res['Fa']['Fa_S'])
+            record('F_air_H',res['Fa']['Fa_H'])
+            record('F_air_X',res['Fa']['Fa_X'])
+            
+            # MAP point estimate instead of full posterior sampling.
+            self.param_posterior = pm.find_MAP(maxeval=50000,include_transformed=False)
+            # Regroup flat 'equip_param' MAP names into nested {equip: {param: value}}.
+            param_posterior_reorder = {'F_air':{}}
+            for equp_name in param_prior.keys():
+                param_posterior_reorder.setdefault(equp_name,{})
+                for param_name,param_value in self.param_posterior.items():
+                    if '__' in param_name:
+                        continue
+                    if param_name == 'F_air_val_rw':
+                        # Collapse the random walk to the median of its last
+                        # 5 segments for use at prediction time.
+                        param_value = np.median(param_value[-5:])
+                    if param_name.startswith(equp_name):
+                        param_name_adj = param_name.replace(f'{equp_name}_','')
+                        param_posterior_reorder[equp_name][param_name_adj] = param_value
+            # NOTE(review): train_data/train_metric look like placeholders — confirm
+            # whether real training metrics are expected downstream.
+            self.record_model(
+                model_name   = 'DHU',
+                model        = param_posterior_reorder,
+                train_data   = {'x':np.array([1])},
+                train_metric = {'R2':1,'MAE':1,'MAPE':1}
+            )
+            
+            # In-sample prediction data.
+            TVP_data = []
+            for param_name in self.param_posterior.keys():
+                if param_name.replace('_mu','') not in observed_data.columns:
+                    continue
+                TVP_data.append(
+                    pd.DataFrame(
+                        {
+                            'param_name': param_name.replace('_mu',''),
+                            'real'      : observed_data.loc[:,param_name.replace('_mu','')].values,
+                            'pred'      : self.param_posterior[param_name]
+                        }
+                    )
+                )
+            self.TVP_data = pd.concat(TVP_data,axis=0)
+            
+            # Per-variable in-sample fit metrics, worst R2 first.
+            group_by_data = self.TVP_data.groupby(['param_name'])[['pred','real']]
+            self.TVP_metric = (
+                pd.concat(
+                    [
+                        group_by_data.apply(lambda dt:r2_score(dt.real,dt.pred)),
+                        group_by_data.apply(lambda dt:mean_absolute_error(dt.real,dt.pred)),
+                        group_by_data.apply(lambda dt:mean_absolute_percentage_error(dt.real,dt.pred)),
+                    ],
+                    axis=1
+                )
+                .set_axis(['R2','MAE','MAPE'],axis=1)
+                .sort_values(by='R2',ascending=True)
+            )
+            if plot_TVP:
+                gg.options.figure_size = (10,10)
+                plot = (
+                    self.TVP_data
+                    .pipe(gg.ggplot)
+                    + gg.aes(x='real',y='pred')
+                    + gg.geom_point()
+                    + gg.facet_wrap(facets='param_name',scales='free')
+                    + gg.geom_abline(intercept=0,slope=1,color='red')
+                )
+                plot.show()
+            
+            return self
+            
+    def predict(self,input_data:pd.DataFrame) -> dict:
+        """Run the fitted system model numerically (numpy engine).
+
+        :param input_data: same column layout as in :meth:`fit`
+        :return: nested dict of per-component outputs, see :meth:`model`
+        """
+        param_posterior = self.model_info['model_DHU']
+        res = DHU_3.model(
+            Tin_F        = input_data.coil_1_ToutA.values,
+            Hin_F        = input_data.coil_1_HoutA.values,
+            HzP          = input_data.HzP.values,
+            HzR          = input_data.HzR.values,
+            coil_1_TinW  = input_data.coil_1_TinW.values,
+            coil_2_TinW  = input_data.coil_2_TinW.values,
+            coil_3_TinW  = input_data.coil_3_TinW.values,
+            coil_1_Val   = input_data.coil_1_Val.values,
+            coil_2_Val   = input_data.coil_2_Val.values,
+            coil_3_Val   = input_data.coil_3_Val.values,
+            wheel_1_TinR = input_data.wheel_1_TinR.values,
+            wheel_2_TinR = input_data.wheel_2_TinR.values,
+            mixed_1_TinM = input_data.mixed_1_TinM.values,
+            mixed_2_TinM = input_data.mixed_2_TinM.values,
+            mixed_1_HinM = input_data.mixed_1_HinM.values,
+            mixed_2_HinM = input_data.mixed_2_HinM.values,
+            engine       = 'numpy',
+            param        = param_posterior
+        )
+        return res
+    
+    def predict_system(self,input_data:pd.DataFrame) -> pd.DataFrame:
+        """Flatten :meth:`predict` output into one DataFrame.
+
+        Columns are '<component>_<output>'; adds total steam flow 'Fs' as the
+        sum of both steam coils.
+        """
+        pred_res = self.predict(input_data)
+        system_output = {}
+        for equp_name,output_info in pred_res.items():
+            for output_name,output_value in output_info.items():
+                system_output[f'{equp_name}_{output_name}'] = output_value
+        system_output       = pd.DataFrame(system_output)
+        system_output['Fs'] = system_output.steamcoil_1_Fs + system_output.steamcoil_2_Fs
+        return system_output
+        
+    
+    @classmethod
+    def model(
+        cls,
+        Tin_F,        # temperature after the front cooling coil
+        Hin_F,        # humidity after the front cooling coil
+        HzP,          # process-side fan frequency 
+        HzR,          # regeneration-side fan frequency
+        coil_1_TinW,  # front coil inlet water temperature (unused in this variant)
+        coil_2_TinW,  # middle coil inlet water temperature
+        coil_3_TinW,  # rear coil inlet water temperature
+        coil_1_Val,   # front coil valve opening (unused in this variant)
+        coil_2_Val,   # middle coil valve opening
+        coil_3_Val,   # rear coil valve opening
+        wheel_1_TinR, # front wheel regeneration-side inlet temperature
+        wheel_2_TinR, # rear wheel regeneration-side inlet temperature
+        mixed_1_TinM, # return-air temperature (process side)
+        mixed_1_HinM, # return-air humidity (process side)
+        mixed_2_TinM, # make-up-air temperature (regeneration side)
+        mixed_2_HinM, # make-up-air humidity (regeneration side)
+        engine,       # 'pymc' (symbolic graph) or 'numpy' (numeric)
+        param         # nested per-component parameter dict
+    ) ->  dict:
+        """Static system graph with fan-frequency-derived air flows.
+
+        Regeneration-side humidities are unknown on the first pass, so each
+        wheel is first evaluated with HinR=0 and then re-evaluated ('_adj')
+        once the downstream humidity is available.
+        """
+        
+        # Water mass flows: valve opening in percent, normalised to 0-1.
+        coil_2_FW = coil_2_Val / 100
+        coil_3_FW = coil_3_Val / 100
+        
+        # Air mass flows: linear in fan frequency (base + slope * Hz/50).
+        F_air_HzP_H = param['F_air']['HzP_H']
+        F_air_HzP_X = param['F_air']['HzP_X']
+        F_air_HzP_S = F_air_HzP_H + F_air_HzP_X
+        F_air_HzR_B = param['F_air']['HzR_B']
+        
+        F_air_S_base  = 1
+        F_air_X_base  = param['F_air']['X_base']
+        F_air_H_base  = param['F_air']['H_base']
+        F_air_B_base  = param['F_air']['B_base']
+        # Optional drift terms (default to 0 when rw_FA_val was False in fit).
+        F_air_val_rw  = param['F_air'].get('val_rw',0)
+        F_air_val_pct = param['F_air'].get('val_pct',0)
+        
+        # The drift shifts flow from H/B into X, split by val_pct.
+        F_air_X_base_adj = F_air_X_base + F_air_val_rw
+        F_air_H_base_adj = F_air_H_base - F_air_val_rw * F_air_val_pct
+        F_air_B_base_adj = F_air_B_base - F_air_val_rw * (1 - F_air_val_pct)
+        
+        Fa_S = F_air_S_base + F_air_HzP_S * (HzP / 50)            
+        Fa_H = F_air_H_base_adj + F_air_HzP_H * (HzP / 50)            
+        Fa_X = F_air_X_base_adj + F_air_HzP_X * (HzP / 50)
+        Fa_B = F_air_B_base_adj + F_air_HzR_B * (HzR / 50)           
+        Fa_P = Fa_B + Fa_X + Fa_H - Fa_S  
+        
+        # Per-component flow assignments derived from the balance above.
+        # NOTE(review): wheel_2_FaR is set to wheel_1_FaC (not wheel_2_FaC's
+        # own balance) — confirm this mirrors the physical duct layout.
+        wheel_1_FaP    = Fa_S - Fa_H
+        wheel_1_FaC    = Fa_X - wheel_1_FaP
+        wheel_1_FaR    = Fa_P
+        wheel_2_FaP    = Fa_S
+        wheel_2_FaC    = wheel_1_FaC
+        wheel_2_FaR    = wheel_1_FaC
+        mixed_1_FaM    = Fa_H
+        mixed_1_FaA    = wheel_1_FaP
+        mixed_2_FaM    = Fa_B
+        mixed_2_FaA    = wheel_1_FaC
+        coil_2_FaA     = Fa_S
+        coil_3_FaA     = Fa_S
+        steamcoil_1_Fa = Fa_P
+        steamcoil_2_Fa = wheel_1_FaC
+        
+        # Front wheel (first pass: regeneration inlet humidity unknown, HinR=0).
+        wheel_1_res = WheelS3.model(
+            TinP   = Tin_F,
+            HinP   = Hin_F,
+            FP     = wheel_1_FaP,
+            TinR   = wheel_1_TinR,
+            HinR   = 0,
+            FR     = wheel_1_FaR,
+            TinC   = Tin_F,
+            HinC   = Hin_F,
+            FC     = wheel_1_FaC,
+            engine = engine,
+            param  = param['wheel_1']
+        )
+        
+        # Process-side mixing (return air).
+        mixed_1_res = Mixed.model(
+            TinA   = wheel_1_res['ToutP'],
+            HinA   = wheel_1_res['HoutP'],
+            FA     = mixed_1_FaA,
+            TinM   = mixed_1_TinM,
+            HinM   = mixed_1_HinM,
+            FM     = mixed_1_FaM,
+            engine = engine
+        )
+        
+        # Middle cooling coil.
+        coil_2_res = CoolingCoil2.model(
+            TinA   = mixed_1_res['ToutA'],
+            HinA   = mixed_1_res['HoutA'],
+            FA     = coil_2_FaA,
+            TinW   = coil_2_TinW,
+            FW     = coil_2_FW,
+            engine = engine,
+            param  = param['coil_2']
+        )
+        
+        # Rear wheel (first pass, HinR=0).
+        wheel_2_res = WheelS3.model(
+            TinP   = coil_2_res['ToutA'],
+            HinP   = coil_2_res['HoutA'],
+            FP     = wheel_2_FaP,
+            TinC   = wheel_1_res['ToutC'],
+            HinC   = wheel_1_res['HoutC'],
+            FC     = wheel_2_FaC,
+            TinR   = wheel_2_TinR,
+            HinR   = 0,
+            FR     = wheel_2_FaR,
+            engine = engine,
+            param  = param['wheel_2'],
+        )
+        
+        # Rear cooling coil.
+        coil_3_res = CoolingCoil2.model(
+            TinA   = wheel_2_res['ToutP'],
+            HinA   = wheel_2_res['HoutP'],
+            FA     = coil_3_FaA,
+            TinW   = coil_3_TinW,
+            FW     = coil_3_FW,
+            engine = engine,
+            param  = param['coil_3']
+        )
+        
+        # Rear wheel humidity correction (second pass with HinR from first pass).
+        wheel_2_res_adj = WheelS3.model(
+            TinP   = coil_2_res['ToutA'],
+            HinP   = coil_2_res['HoutA'],
+            FP     = wheel_2_FaP,
+            TinC   = wheel_1_res['ToutC'],
+            HinC   = wheel_1_res['HoutC'],
+            FC     = wheel_2_FaC,
+            TinR   = wheel_2_TinR,
+            HinR   = wheel_2_res['HoutC'],
+            FR     = wheel_2_FaR,
+            engine = engine,
+            param  = param['wheel_2'],
+        )
+
+        # Regeneration-side mixing (exhaust).
+        mixed_2_res = Mixed.model(
+            TinA   = wheel_2_res_adj['ToutR'],
+            HinA   = wheel_2_res_adj['HoutR'],
+            FA     = mixed_2_FaA,
+            TinM   = mixed_2_TinM,
+            HinM   = mixed_2_HinM,
+            FM     = mixed_2_FaM,
+            engine = engine
+        )
+        
+        # Front wheel humidity correction (second pass with HinR from mixing).
+        wheel_1_res_adj = WheelS3.model(
+            TinP   = Tin_F,
+            HinP   = Hin_F,
+            FP     = wheel_1_FaP,
+            TinR   = wheel_1_TinR,
+            HinR   = mixed_2_res['HoutA'],
+            FR     = wheel_1_FaR,
+            TinC   = Tin_F,
+            HinC   = Hin_F,
+            FC     = wheel_1_FaC,
+            engine = engine,
+            param  = param['wheel_1']
+        )
+        
+        # Front steam coil.
+        steamcoil_1_res = SteamCoilFs2.model(
+            TinA   = mixed_2_res['ToutA'],
+            ToutA  = wheel_1_TinR,
+            FA     = steamcoil_1_Fa,
+            param  = param['steamcoil_1'],
+            engine = engine
+        )
+        # steamcoil_1_res = SteamCoilFs3.model(
+        #     TinA   = mixed_2_res['ToutA'],
+        #     ToutA  = wheel_1_TinR,
+        #     HinA   = mixed_2_res['DoutA'],
+        #     HoutA  = mixed_2_res['HoutA'],
+        #     FA     = steamcoil_1_Fa,
+        #     param  = param['steamcoil_1'],
+        #     engine = engine
+        # )
+        
+        # Rear steam coil.
+        steamcoil_2_res = SteamCoilFs.model(
+            TinA   = wheel_2_res_adj['ToutC'],
+            ToutA  = wheel_2_TinR,
+            FA     = steamcoil_2_Fa,
+            param  = param['steamcoil_2'],
+            engine = engine
+        )
+        
+        return {
+            'coil_2'     : coil_2_res,
+            'coil_3'     : coil_3_res,
+            'wheel_1'    : wheel_1_res_adj,
+            'wheel_2'    : wheel_2_res_adj,
+            'mixed_1'    : mixed_1_res,
+            'mixed_2'    : mixed_2_res,
+            'steamcoil_1': steamcoil_1_res,
+            'steamcoil_2': steamcoil_2_res,
+            'Fa':{
+                'Fa_S': Fa_S,
+                'Fa_H': Fa_H,
+                'Fa_X': Fa_X,
+                'Fa_P': Fa_P,
+                'Fa_B': Fa_B,
+            },
+            'summary'    : {}
+        }
+        

+ 122 - 0
tools/enthalpy.py

@@ -0,0 +1,122 @@
+import numpy as np
+try:
+    import pymc as pm
+    from pytensor.tensor import where
+except:
+    pass
+
PRESSURE = 101325  # standard atmospheric pressure, Pa

# Humidity ratio from dew point
def get_HumRatio_from_Dew(dew_point, engine):
    """
    Compute the humidity ratio from the dew-point temperature.

    At the dew point, the saturation vapor pressure equals the actual
    partial pressure of water vapor, so the Magnus approximation at
    ``dew_point`` gives the actual vapor pressure directly.

    :param dew_point: dew-point temperature (°C)
    :param engine: 'pymc' (symbolic tensors) or 'numpy'
    :return: humidity ratio, kg water vapor / kg dry air
    :raises ValueError: if ``engine`` is not 'pymc' or 'numpy'
    """
    if engine == 'pymc':
        EXP = pm.math.exp
    elif engine == 'numpy':
        EXP = np.exp
    else:
        # Fail fast instead of hitting an UnboundLocalError on EXP below.
        raise ValueError(f"unknown engine: {engine!r}")

    # Magnus formula: pressure in hPa, converted to Pa with the *100.
    vapor_pressure = 6.112 * EXP(17.67 * dew_point / (dew_point + 243.5)) * 100
    # 0.622 ~= molecular-weight ratio of water vapor to dry air.
    HumRatio = (0.622 * vapor_pressure) / (PRESSURE - vapor_pressure)
    return HumRatio
+
def get_Dew_from_HumRatio(HumRatio, engine):
    """
    Compute the dew-point temperature from the humidity ratio
    (inverse of the Magnus formula used in ``get_HumRatio_from_Dew``).

    :param HumRatio: humidity ratio, kg water vapor / kg dry air
    :param engine: 'pymc' (symbolic tensors) or 'numpy'
    :return: dew-point temperature (°C)
    :raises ValueError: if ``engine`` is not 'pymc' or 'numpy'
    """
    if engine == 'pymc':
        LOG = pm.math.log
        WHERE = where
    elif engine == 'numpy':
        LOG = np.log
        WHERE = np.where
    else:
        # Fail fast instead of an UnboundLocalError on LOG/WHERE below.
        raise ValueError(f"unknown engine: {engine!r}")
    # Clamp tiny/negative inputs so LOG never sees a non-positive value.
    HumRatio       = WHERE(HumRatio < 1e-5, 1e-5, HumRatio)
    vapor_pressure = (HumRatio * PRESSURE) / (0.622 + HumRatio)
    x              = LOG(vapor_pressure / (6.112 * 100))
    # Inverse Magnus: solve T from x = 17.67*T/(T+243.5).
    dew_point      = (243.5 * x) / (17.67 - x)
    return dew_point
+
def get_Enthalpy_from_Tdb_and_HumRatio(Tdb, Hr, engine):
    """
    Moist-air specific enthalpy (kJ/kg dry air) from dry-bulb
    temperature (°C) and humidity ratio (kg/kg).

    ``engine`` is accepted for interface uniformity with the other
    psychrometric helpers; the computation is plain arithmetic and
    works unchanged for both numpy values and pymc tensors.
    """
    CP_DRY_AIR  = 1.006  # specific heat of dry air, kJ/(kg*K)
    CP_VAPOR    = 1.805  # specific heat of water vapor, kJ/(kg*K)
    LATENT_HEAT = 2501   # latent heat of vaporization of water, kJ/kg
    # Sensible part (dry air) plus the vapor's latent + sensible part.
    return CP_DRY_AIR * Tdb + Hr * (LATENT_HEAT + CP_VAPOR * Tdb)
+
def get_Enthalpy_from_Tdb_and_Dew(Tdb, Dew, engine):
    """
    Moist-air specific enthalpy (kJ/kg dry air) from dry-bulb and
    dew-point temperatures (°C): convert the dew point to a humidity
    ratio, then delegate to the (Tdb, Hr) enthalpy helper.
    """
    return get_Enthalpy_from_Tdb_and_HumRatio(
        Tdb,
        get_HumRatio_from_Dew(Dew, engine),
        engine,
    )
+
def get_mixed_Dew(F1, F2, Dew1, Dew2, engine):
    """
    Dew-point temperature of the mixture of two air streams.

    The humidity ratios of the two streams are flow-weighted, the mixed
    vapor partial pressure is recovered from the mixed humidity ratio,
    and the dew point is obtained by inverting the saturation-pressure
    formula. Assumes total pressure stays at ``PRESSURE``.

    :param F1: flow of stream 1 (same unit as F2; must not both be 0)
    :param F2: flow of stream 2
    :param Dew1: dew point of stream 1 (°C)
    :param Dew2: dew point of stream 2 (°C)
    :param engine: 'pymc' (symbolic tensors) or 'numpy'
    :return: dew point of the mixed stream (°C)
    :raises ValueError: if ``engine`` is not 'pymc' or 'numpy'
    """
    if engine == 'pymc':
        LOG = pm.math.log
        EXP = pm.math.exp
    elif engine == 'numpy':
        LOG = np.log
        EXP = np.exp
    else:
        # Fail fast instead of an UnboundLocalError on LOG/EXP below.
        raise ValueError(f"unknown engine: {engine!r}")

    # NOTE(review): this helper uses the 611 Pa / 17.27 / 237.3 constant
    # set, while the other functions in this module use
    # 611.2 Pa / 17.67 / 243.5 — confirm whether that is intentional.

    # Saturation pressure (Pa) at T; at the dew point this equals the
    # actual vapor partial pressure of the stream.
    def saturation_pressure(T):
        return 611 * EXP(17.27 * T / (T + 237.3))

    # Humidity ratio from vapor partial pressure.
    def humidity_ratio(e_s):
        return 0.622 * e_s / (PRESSURE - e_s)

    # Dew point from vapor partial pressure (inverse of the formula above).
    def dew_point_temperature(e_mix):
        return (237.3 * LOG(e_mix / 611)) / (17.27 - LOG(e_mix / 611))

    # Vapor partial pressures and humidity ratios of the two streams.
    e_s1 = saturation_pressure(Dew1)
    e_s2 = saturation_pressure(Dew2)
    W1 = humidity_ratio(e_s1)
    W2 = humidity_ratio(e_s2)

    W_mix = (F1 * W1 + F2 * W2) / (F1 + F2)       # flow-weighted humidity ratio
    e_mix = (W_mix * PRESSURE) / (0.622 + W_mix)  # vapor pressure of the mix
    d_mix = dew_point_temperature(e_mix)          # back to a dew point

    return d_mix
+
def get_RH_from_Tdb_and_Hr(Tdb, Hr, engine):
    """
    Relative humidity (0~1) from dry-bulb temperature and humidity
    ratio, using the Hyland-Wexler (1983) saturation-pressure
    correlation adopted by ASHRAE. Total pressure is the module-level
    ``PRESSURE`` constant.

    :param Tdb: dry-bulb temperature (°C)
    :param Hr: humidity ratio (kg water vapor / kg dry air)
    :param engine: 'pymc' (symbolic tensors) or 'numpy'
    :return: relative humidity, 0~1 (may exceed 1 for supersaturated input)
    :raises ValueError: if ``engine`` is not 'pymc' or 'numpy'
    """
    if engine == 'pymc':
        LOG = pm.math.log
        EXP = pm.math.exp
    elif engine == 'numpy':
        LOG = np.log
        EXP = np.exp
    else:
        # Fail fast instead of an UnboundLocalError on LOG/EXP below.
        raise ValueError(f"unknown engine: {engine!r}")
    # 1. Saturation vapor pressure (Pa) — Hyland-Wexler (1983)
    #    correlation over liquid water, per the ASHRAE Handbook.
    T_kelvin = Tdb + 273.15  # convert to Kelvin
    ln_es = (
        -5.8002206e3 / T_kelvin
        + 1.3914993
        - 4.8640239e-2 * T_kelvin
        + 4.1764768e-5 * T_kelvin**2
        - 1.4452093e-8 * T_kelvin**3
        + 6.5459673 * LOG(T_kelvin)
    )
    es = EXP(ln_es)
    # 2. Actual vapor partial pressure (Pa);
    #    0.621945 ~= molecular-weight ratio M_water / M_dry_air.
    e = Hr * PRESSURE / (0.621945 + Hr)
    # 3. RH = actual / saturation vapor pressure.
    RH = e / es
    return RH