diff --git a/.gitignore b/.gitignore index 6a86f38..48f2787 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,8 @@ MANIFEST */.ipynb_checkpoints/* **/NOTES **/*.mp4 + +results +benchmark +logs +etc \ No newline at end of file diff --git a/Dockerfile.wine_quality b/Dockerfile.wine_quality new file mode 100644 index 0000000..fbedb76 --- /dev/null +++ b/Dockerfile.wine_quality @@ -0,0 +1,66 @@ +# Wine Quality Experiment Dockerfile +# Extends the base pnpxai-experiments image with additional XAI frameworks + +# Use the base image from README +FROM seongun/ubuntu22.04-cuda12.2.2-cudnn8-pytorch2.1:base + +# Set working directory +WORKDIR /root/pnpxai-experiments + +# Reinstall pnpxai from exp/tab branch +RUN pip uninstall -y pnpxai && \ + pip install --no-cache-dir git+https://github.com/OpenXAIProject/pnpxai.git@exp/tab + +# Install additional XAI frameworks for Wine Quality experiments +# Pin specific versions for reproducibility (verified on 2025-11-13) +# Note: shap<=0.44.0 required for OmniXAI v1.3.2 compatibility (output format changed in 0.45.0) +RUN pip install --no-cache-dir \ + captum==0.8.0 \ + scikit-learn==1.1.3 \ + pandas==2.3.3 \ + xgboost==3.1.1 \ + shap==0.44.0 \ + lime==0.2.0.1 \ + pyyaml==6.0 \ + tqdm==4.66.0 \ + ucimlrepo + +# Install XAI frameworks from GitHub +# Pin to specific versions for reproducibility (verified on 2025-11-13) + +# OmniXAI v1.3.2 - Salesforce's comprehensive XAI library +RUN pip install --no-cache-dir git+https://github.com/salesforce/OmniXAI.git@v1.3.2 + +# OpenXAI v0.1 - Standardized XAI evaluation framework +# Note: Uses latest commit as no version tags available +RUN pip install --no-cache-dir git+https://github.com/AI4LIFE-GROUP/OpenXAI.git + +# Create separate virtual environment for AutoXAI +# AutoXAI code is mounted from experiments/scripts/lib/AutoXAI (not cloned in Docker) +# AutoXAI requires bayesian-optimization which needs numpy<2.0, but other frameworks need numpy>=2.0 +# AutoXAI also requires aix360, which depends on xport (requires pandas<1.4) and cvxpy +RUN python -m venv /opt/autoxai_venv && \ + /opt/autoxai_venv/bin/pip install --upgrade pip && \ + /opt/autoxai_venv/bin/pip install --no-cache-dir \ + 'numpy<2.0' \ + scikit-learn==1.1.3 \ + scikit-learn-extra \ + scikit-optimize \ + 'pandas<1.4' \ + xgboost==3.1.1 \ + shap==0.49.1 \ + lime==0.2.0.1 \ + aix360 \ + xport \ + cvxpy \ + pyyaml==6.0 \ + tqdm==4.66.0 \ + bayesian-optimization \ + ucimlrepo && \ + /opt/autoxai_venv/bin/pip install --no-cache-dir git+https://github.com/OpenXAIProject/pnpxai.git@exp/tab + +# Clean up pip cache to reduce image size +RUN pip cache purge + +# Set default command +CMD ["/bin/bash"] diff --git a/README.md b/README.md index f182c57..cd09b00 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,17 @@ This repository contains the official implementation and experimental code for t **PnPXAI** is a framework designed to overcome the challenges in generating reliable explanations for complex AI models through an end-to-end automated pipeline. This pipeline includes model architecture detection, applicable explainer recommendation, objective-driven hyperparameter optimization (HPO), and evaluation. This repository provides the necessary code to set up the environment and reproduce the key experiments presented in the paper, demonstrating PnPXAI's effectiveness across various tasks and modalities. 
+## Table of Contents +- [Setup](#setup) +- [Running Experiments](#running-experiments) + - [Experiment 1: ImageNet Explanation](#experiment-1-imagenet-explanation) + - [Experiment 2: Hyperparameter Impact Analysis](#experiment-2-hyperparameter-impact-analysis) + - [Experiment 3: Liver Tumor Explanation](#experiment-3-liver-tumor-explanation) + - [Experiment 4: Acute Kidney Injury (AKI) Explanation](#experiment-4-acute-kidney-injury-aki-explanation) + - [Experiment 5: ECG Explanation](#experiment-5-ecg-explanation) + - [Experiment 6: Wine Quality Explanation](#experiment-6-wine-quality-explanation) +- [License](#license) + ## Setup We provide two ways to set up the environment: using Docker (recommended for exact reproducibility) or manual installation. @@ -29,10 +40,12 @@ We provide two ways to set up the environment: using Docker (recommended for exa ``` 3. **Run the Docker container:** - This command starts an interactive container, mounts the project code, and assigns GPUs. Adjust `-v` source path and `--gpus` devices as needed. + This command starts an interactive container, mounts the project code, and assigns GPUs. Adjust mount paths and `--gpus` devices as needed. The ImageNet paths are recommended for running Experiment 2. ```bash docker run -it \ -v "$(pwd)":/root/pnpxai-experiments \ + -v /PATH_TO_IMAGENET/ImageNet1k:/root/pnpxai-experiments/data/ImageNet/ImageNet1k:ro \ + -v /PATH_TO_IMAGENET/ImageNet1k_info:/root/pnpxai-experiments/data/ImageNet/ImageNet1k_info:ro \ --gpus '"device=0"' \ # Example: Assign GPU 0 --name pnpxai_exp \ seongun/ubuntu22.04-cuda12.2.2-cudnn8-pytorch2.1:base @@ -77,35 +90,108 @@ We provide two ways to set up the environment: using Docker (recommended for exa pip install -e . ``` -## Data +## Running Experiments + +This repository contains code for various experiments presented in the PnPXAI paper. Each experiment can typically be run using scripts located in [`experiments/scripts/`](experiments/scripts/). -The **Liver Tumor Classification dataset** used in these experiments is hosted on Hugging Face Hub: -[➡️ seongun/liver-tumor-classification](https://huggingface.co/datasets/seongun/liver-tumor-classification) +### Experiment 1: ImageNet Explanation -This dataset contains individual 2D CT scan slices derived from the original [LiTS dataset](https://doi.org/10.1016/j.media.2022.102680). Data is stored as PNG images (`sample`, `w_sample`, `mask`) linked via a `metadata.jsonl` file. +This experiment qualitatively analyzes the effect of HPO (optimizing for AbPC) on explanations of `LRPUniformEpsilon`, `IntegratedGradients`, and `KernelShap` on [ImageNet1k](https://www.image-net.org/index.php) samples, evaluating the change in faithfulness metrics (MoRF, LeRF, AbPC). -The experiment scripts (`experiments/scripts/`) **automatically download** the necessary data files (metadata and specific image instances required) using the `huggingface_hub` library when first executed. The downloaded data will be stored in the Hugging Face cache directory (usually `~/.cache/huggingface/datasets`). +#### Data and Model -For more details on the data loading process, refer to the `get_livertumor_dataset_from_hf` function within [`experiments/utils/helpers.py`](./experiments/utils/helpers.py). 
+ * **Data (ImageNet)**: The ImageNet1k subset used in this experiment (one sample per label, 1,000 samples in total) is hosted on Hugging Face Hub: [➡️ geonhyeongkim/imagenet-samples-for-pnpxai-experiments](https://huggingface.co/datasets/geonhyeongkim/imagenet-samples-for-pnpxai-experiments). The script **automatically downloads** the necessary files when first executed. For more details on the data loading process, refer to the `get_imagenet_samples_from_hf` function within [`experiments/utils/datasets.py`](./experiments/utils/datasets.py). -## Pre-trained Model + * **Model (ResNet-18):** This script uses a standard `ResNet-18` model pre-trained on ImageNet, loaded directly from `torchvision.models`. -The pre-trained **ResNet-50 model** adapted for liver tumor classification is hosted on Hugging Face Hub: -[➡️ seongun/resnet50-livertumor](https://huggingface.co/seongun/resnet50-livertumor) +#### Usage -Similar to the dataset, the experiment scripts **automatically download** the model weights using the `huggingface_hub` library when needed. The model architecture definition (`ResNet50LiverTumor`) is included in [`experiments/models/liver_tumor.py`](./experiments/models/liver_tumor.py). +```bash +python -m experiments.scripts.analyze_imagenet_hpo \ + --data_id 72 \ + --save_dir results/analyze_imagenet_hpo/ \ + --seed 42 \ + --n_trials 100 \ + --analyze \ + --visualize +``` -For more details on model loading, refer to the `get_livertumor_model_from_hf` function within [`experiments/utils/helpers.py`](experiments/utils/helpers.py). +#### Arguments -## Running Experiments + * `--data_id `: The specific index (`0`-`999`) of the data instance from the Hugging Face dataset to analyze. + * `--save_dir `: Directory where experiment results are saved. + * `--n_trials `: Number of trials for hyperparameter optimization (HPO). Defaults to `100`. + * `--analyze`: Runs the HPO process and saves the raw results to `/raw/.pkl`. + * `--visualize`: Loads the previously saved results for the specified `--data_id` and generates a visualization PDF comparing default vs. optimized attributions and metrics. Saves the figure to `/figures/.pdf`. Requires results to be saved first (using `--analyze`). -This repository contains code for various experiments presented in the PnPXAI paper. Each experiment can typically be run using scripts located in [`experiments/scripts/`](experiments/scripts/). +#### Output -### Experiment 1: Liver Tumor Explanation +Results will be saved under the `` directory, organized by data instance ID. + +--- + +### Experiment 2: Hyperparameter Impact Analysis + +This experiment evaluates a grid of hyperparameter combinations for various explainers on a subset of the ImageNet validation set and generates plots comparing the impact on evaluation metrics. + +#### Data and Model + + * **Data (ImageNet):** This script requires the **ImageNet 1k dataset**. You must download it from the [official site](https://image-net.org/download.php) (requires registration). The script assumes the validation set is organized in the `data/ImageNet/` directory as follows. The `docker run` command in the Setup section already includes the recommended mounts for these paths.
+ ``` + data/ + └── ImageNet/ + ├── ImageNet1k/ + │ └── val/ + │ └── val/ + │ └── ILSVRC2012_val_IDX.JPEG + └── ImageNet1k_info/ + ├── ImageNet_class_index.json + └── ImageNet_val_label.txt + ``` + + * **Model (ResNet-18):** This script uses a standard `ResNet-18` model pre-trained on ImageNet, loaded directly from `torchvision.models`. + +#### Usage + +```bash +python -m experiments.scripts.analyze_imagenet_hpo_impact \ + --data_dir data/ImageNet \ + --batch_size 4 \ + --analyze \ + --visualize \ + --eval_explainer smooth_grad lrp_epsilon_gamma_box guided_grad_cam integrated_gradients +``` + +#### Arguments + + * `--data_dir `: Path to the root `ImageNet` directory (which contains `ImageNet1k` and `ImageNet1k_info`). + * `--analyze`: Runs the full grid search evaluation and saves the raw metric results. + * `--visualize`: Loads the raw results and generates the final plots. + * `--eval_explainer `: A space-separated list of explainers to analyze (e.g., `smooth_grad`, `guided_grad_cam`). + +#### Note on `batch_size` + + * This experiment runs on a 128-image subset (the default `--data_to 128`). + * Most explainers (e.g., `smooth_grad`, `guided_grad_cam`, `lrp_epsilon_gamma_box`) can run with a large batch size, e.g., `--batch_size 128`. + * `integrated_gradients` is memory-intensive and may require a smaller batch size (e.g., `--batch_size 4`), depending on your GPU. + +#### Output + +Raw results (`.pkl`, `.csv`) will be saved under the `results/hpo_impact_imagenet/raw/resnet18/` directory, and the generated figures (`.pdf`) will be saved in `results/hpo_impact_imagenet/figures/resnet18/`. + +--- + +### Experiment 3: Liver Tumor Explanation This experiment analyzes the effect of HPO (optimizing for AbPC) on explanations for a liver tumor CT slice, evaluating the change in ground truth agreement (Relevance Mass/Rank Accuracy). -**Usage:** +#### Data and Model + + * **Data (Liver Tumor):** The **Liver Tumor Classification dataset** used in this experiment is hosted on Hugging Face Hub: [➡️ seongun/liver-tumor-classification](https://huggingface.co/datasets/seongun/liver-tumor-classification). This dataset contains individual 2D CT scan slices derived from the original [LiTS dataset](https://doi.org/10.1016/j.media.2022.102680). The script **automatically downloads** the necessary files when first executed. For more details on the data loading process, refer to the `get_livertumor_dataset_from_hf` function within [`experiments/utils/datasets.py`](./experiments/utils/datasets.py). + + * **Model (ResNet-50 Liver Tumor):** The pre-trained **ResNet-50 model** adapted for this task is hosted on Hugging Face Hub: [➡️ seongun/resnet50-livertumor](https://huggingface.co/seongun/resnet50-livertumor). Similar to the dataset, the script **automatically downloads** the model weights. The model architecture is defined in [`experiments/models/liver_tumor.py`](./experiments/models/liver_tumor.py). For more details on model loading, refer to the `get_livertumor_model_from_hf` function within [`experiments/utils/models.py`](./experiments/utils/models.py). + +#### Usage ```bash python -m experiments.scripts.analyze_livertumor_hpo \ @@ -115,15 +201,145 @@ python -m experiments.scripts.analyze_livertumor_hpo \ --visualize ``` -**Arguments:** +#### Arguments + * `--data_id `: The specific index (e.g., `2280`) of the data instance from the Hugging Face dataset to analyze. + * `--n_trials `: Number of trials for hyperparameter optimization (HPO). Defaults to `100`.
* `--analyze`: Runs the HPO process and saves the raw results (`.pkl` files for default run, and optimized run) to `results/hpo_analysis_livertumor/raw//`. + * `--visualize`: Loads the previously saved results for the specified `--data_id` and generates a visualization PDF comparing default vs. optimized attributions and metrics. Saves the figure to `results/hpo_analysis_livertumor/figures/.pdf`. Requires results to be saved first (using `--analyze`). -**Output:** +#### Output + Results will be saved under the `results/hpo_analysis_livertumor/` directory, organized by data instance ID. +--- + +### Experiment 4: Acute Kidney Injury (AKI) Explanation + +This experiment analyzes the effect of HPO (optimizing for AbPC) on explanations for medical data used for acute kidney injury (AKI) detection, evaluating the change in ground truth agreement (Relevance Mass/Rank Accuracy). + +#### Data and Model + + * **Data (MIMIC III):** The **MIMIC III dataset** used in this experiment is hosted on PhysioNet: [➡️ MIMIC-III Clinical Database](https://physionet.org/content/mimiciii/1.4/). This work utilizes the latest version of the [MIMIC III dataset](https://doi.org/10.13026/C2XW26). To use the analysis script, the dataset needs to be downloaded, built, and formatted. After downloading the dataset from the official source, users should build the PostgreSQL version of the dataset with the official [GitHub code](https://github.com/MIT-LCP/mimic-code/tree/main/mimic-iii/buildmimic/postgres). Subsequently, the built dataset can be formatted with the set of scripts in the [`data/mimiciii`](./data/mimiciii/) directory. Detailed instructions on the data transformation are provided in its [`README.md`](./data/mimiciii/README.md). Once the formatted data has been generated, the analysis script **loads** the necessary files when first executed. For more details on the data loading process, refer to the `get_aki_dataset` function within [`experiments/utils/datasets.py`](./experiments/utils/datasets.py). + + * **Model (AKI Classifier):** The pre-trained **Linear model** adapted for this task is hosted on Hugging Face Hub: [➡️ enver1323/aki-classifier](https://huggingface.co/enver1323/aki-classifier). Similar to the dataset, the script **automatically downloads** the model weights. The model architecture is defined in [`experiments/models/aki.py`](./experiments/models/aki.py). For more details on model loading, refer to the `get_aki_model_from_hf` function within [`experiments/utils/models.py`](./experiments/utils/models.py). + +#### Usage + +```bash +python -m experiments.scripts.analyze_aki_hpo \ + --n_trials 100 \ + --analyze \ + --visualize +``` + +#### Arguments + + * `--n_trials `: Number of trials for hyperparameter optimization (HPO). Defaults to `20`. + * `--analyze`: Runs the HPO process and saves the top-K columns and the attributions (as `.json` and `.npy` files, respectively) for both the default and optimized runs to `results/hpo_analysis_aki/topk//`. + * `--visualize`: Loads the previously saved results and generates a visualization PDF comparing default vs. optimized attributions and metrics. Saves the figure to `results/hpo_analysis_aki/explanation_summary.pdf`. Requires results to be saved first (using `--analyze`). + +#### Output + +Results will be saved under the `results/hpo_analysis_aki/` directory, organized by explainer name.
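+
+For a quick sanity check of what `--analyze` produced, the saved artifacts can be loaded directly with `json` and `numpy`. The snippet below is a minimal sketch only: the exact file names inside each explainer directory under `results/hpo_analysis_aki/topk/` (assumed here to be `default.json`/`optimized.json` and `default.npy`/`optimized.npy`) are illustrative and should be adjusted to match what the script actually writes.
+
+```python
+import json
+from pathlib import Path
+
+import numpy as np
+
+# Results are organized by explainer name under the top-K output directory.
+topk_root = Path("results/hpo_analysis_aki/topk")
+
+for explainer_dir in sorted(p for p in topk_root.iterdir() if p.is_dir()):
+    for run in ("default", "optimized"):
+        cols_path = explainer_dir / f"{run}.json"  # hypothetical file name
+        attr_path = explainer_dir / f"{run}.npy"   # hypothetical file name
+        if not (cols_path.exists() and attr_path.exists()):
+            continue
+        top_columns = json.loads(cols_path.read_text())  # assumed to hold a list of column names
+        attributions = np.load(attr_path)
+        print(explainer_dir.name, run, top_columns[:5], attributions.shape)
+```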
+ +--- + +### Experiment 5: ECG Explanation + +This experiment analyzes the effects of HPO (optimizing for multiple metrics) on explanations for an ECG time series dataset, evaluating the change in metric values. + +#### Data and Model + + * **Data (ECG):** The **ECG dataset** used in this experiment is hosted on Hugging Face Hub: [➡️ enver1323/ucr-twoleadecg](https://huggingface.co/datasets/enver1323/ucr-twoleadecg). This dataset contains time series ECG segments derived from the original [UCR dataset](https://doi.org/10.48550/arXiv.1810.07758). The script **automatically downloads** the necessary files when first executed. For more details on the data loading process, refer to the `get_ecg_dataset_from_hf` function within [`experiments/utils/datasets.py`](./experiments/utils/datasets.py). + + * **Model (ResNetPlus):** The pre-trained **ResNetPlus model** adapted for this task is hosted on Hugging Face Hub: [➡️ enver1323/resnetplus-classification-ecg](https://huggingface.co/enver1323/resnetplus-classification-ecg). Similar to the dataset, the script **automatically downloads** the model weights. The model architecture is defined in [`experiments/models/ecg/resnet_plus.py`](./experiments/models/ecg/resnet_plus.py). For more details on model loading, refer to the `get_ecg_resnet_from_hf` function within [`experiments/utils/models.py`](./experiments/utils/models.py). + + * **Model (PatchTST):** The pre-trained **PatchTST model** adapted for this task is hosted on Hugging Face Hub: [➡️ enver1323/patchtst-classification-ecg](https://huggingface.co/enver1323/patchtst-classification-ecg). Similar to the dataset, the script **automatically downloads** the model weights. The model architecture is defined in [`experiments/models/ecg/patchtst.py`](./experiments/models/ecg/patchtst.py). For more details on model loading, refer to the `get_ecg_patchtst_from_hf` function within [`experiments/utils/models.py`](./experiments/utils/models.py). + +#### Usage + +```bash +python -m experiments.scripts.analyze_ecg_hpo \ + --model resnet_plus \ + --out_file results/hpo_analysis_ecg/explanations_summary.csv +``` + +#### Arguments + + * `--model `: The name of the model (`resnet_plus`, `patchtst`) to analyze. + * `--out_file `: The name of the output file to store the explanation summary. The value defaults to `results/hpo_analysis_ecg/explanations_summary.csv`. + +#### Output + +Results will be saved to the file path (`FILENAME`) specified by the `--out_file` argument. + +--- + +### Experiment 6: Wine Quality Explanation + +This experiment compares multiple XAI frameworks (PnPXAI, Captum, OmniXAI, OpenXAI, AutoXAI) on the Wine Quality dataset using various models and explainer methods. It evaluates explanations using Faithfulness, Complexity, and their Composite score. + +#### Data and Model + * **Data (Wine Quality):** The **Wine Quality dataset** contains ~6,497 samples (white and red wine combined) and is used for binary classification (good vs. bad quality). + + * **Model (XGBoost & TabResNet):** + * **XGBoost:** A gradient boosting classifier trained on the tabular features. + * **TabResNet:** A ResNet-like architecture adapted for tabular data. + Pre-trained weights for both models are included in the `data/wine_quality/` directory. + +#### Setup for Wine Quality Experiment + +Due to dependency conflicts between frameworks (for example, AutoXAI requires `numpy<2.0` and `pandas<1.4` while the other frameworks need newer versions, so [`Dockerfile.wine_quality`](./Dockerfile.wine_quality) isolates it in a separate virtual environment), this experiment requires a dedicated Docker environment separate from the main setup.
+ +Please build the specific Docker image using the provided [`Dockerfile.wine_quality`](./Dockerfile.wine_quality): + +```bash +# Build the Wine Quality Docker image +docker build -t pnpxai_wine_quality:latest -f Dockerfile.wine_quality . +``` + +#### Usage + +1. **Run the container:** + Start the interactive container with GPU support and volume mounting. + ```bash + docker run --rm -it \ + --runtime=nvidia \ + --gpus all \ + --shm-size=8g \ + -v $(pwd):/root/pnpxai-experiments \ + pnpxai_wine_quality:latest + ``` + +2. **Run the experiment:** + Inside the container, execute the analysis script: + ```bash + python -m experiments.scripts.analyze_wine_quality \ + --n_samples 25 \ + --seed 42 \ + --verbose \ + --data_dir data/wine_quality \ + --config_dir experiments/configs/tabular \ + --results_dir results/wine_quality + ``` + +#### Arguments + + * `--n_samples `: Number of samples for sampling-based explainers (LIME/SHAP). Defaults to `25`. + * `--seed `: Random seed for reproducibility. Defaults to `42`. + * `--verbose`: Enable detailed logging. + * `--data_dir `: Path to data directory. Defaults to `data/wine_quality`. + * `--config_dir `: Path to config directory. Defaults to `experiments/configs/tabular`. + * `--results_dir `: Path to results directory. Defaults to `results/wine_quality`. + +#### Output + +The experiment will generate the following in the `results/wine_quality/` directory: + + * **Individual explanations:** Saved in `results/wine_quality/{model}/{framework}/{explainer}/` as `.npy` files (explanations and metric scores). + * **Summary table:** `experiment_result.md` containing a LaTeX table comparing Faithfulness, Complexity, and Composite scores across all frameworks. + * **Execution log:** `experiment.log` (if verbose logging is enabled or configured). ## Citation @@ -132,4 +348,12 @@ Will be updated later. ## License -This project's code is licensed under the [MIT License](https://www.google.com/search?q=LICENSE). The used dataset is derived from LiTS and retains its original [CC-BY-NC-SA-4.0 License](https://creativecommons.org/licenses/by-nc-sa/4.0/). +This project's code is licensed under the [MIT License](LICENSE). + +The datasets used in the experiments are derived from existing benchmarks and are subject to their original licenses: + +* **ImageNet:** Subject to the [ImageNet Terms of Access](https://image-net.org/download.php). The dataset is restricted to non-commercial research and educational purposes only. Users must obtain access via the official website and agree to the Terms of Access. +* **LiTS (Liver Tumor):** [CC-BY-NC-SA-4.0 License](https://creativecommons.org/licenses/by-nc-sa/4.0/) +* **MIMIC-III (AKI):** Subject to the [PhysioNet Credentialed Health Data License 1.5.0](https://physionet.org/content/mimiciii/view-license/1.4/). Due to license restrictions, we do not distribute the data. Users must obtain access via PhysioNet and agree to the data use agreement. +* **ECG:** Derived from the UCR Time Series Classification Archive. Free for research and educational use.
([UCR Archive](https://www.cs.ucr.edu/~eamonn/time_series_data_2018/)) +* **Wine Quality:** [CC BY 4.0 License](https://creativecommons.org/licenses/by/4.0/) diff --git a/data/mimiciii/README.md b/data/mimiciii/README.md new file mode 100644 index 0000000..0395136 --- /dev/null +++ b/data/mimiciii/README.md @@ -0,0 +1,85 @@ +# MIMIC III Data Generation + +## Data Loading + +[MIMIC III Clinical Database](https://doi.org/10.13026/C2XW26) is a large database of anonymized data of more than forty thousand patients. The data provided at the source should first be downloaded and built. The building process is provided by the official [GitHub package](https://github.com/MIT-LCP/mimic-code/tree/main/mimic-iii/buildmimic/postgres). We utilize the PostgreSQL version of the built database for quick and convenient data querying. + +## Data Formatting + +Once MIMIC III has been built on a PostgreSQL DBMS, the data needs to be preprocessed. This process comprises three essential stages: +* Database parsing +* Preprocessing +* Data cleanup + +### Database Parsing + +The [`parse_db.py`](./parse_db.py) script connects to the database to build essential materialized views, which can then be queried for quick data extraction. + +#### Usage + +```bash +python parse_db.py \ + --host localhost \ + --db mimic \ + --user postgres \ + --password postgres \ + --out_path ./formatted +``` + +#### Arguments + + * `--host `: The host of the PostgreSQL DBMS containing the built MIMIC III database. The default value is set to `localhost`. + * `--db `: The name of the PostgreSQL database containing the built MIMIC III data. The default value is set to `mimic`. + * `--user `: The user owning the PostgreSQL database containing the built MIMIC III data. The default value is set to `postgres`. + * `--password `: The password of the user owning the PostgreSQL database containing the built MIMIC III data. The default value is set to `postgres`. + * `--out_path `: The target directory for the intermediate files produced by the script. The default value is set to `./formatted`. + +#### Output + +Results will be saved under the path specified by `OUT_PATH`. The list of files in the directory is expected to consist of: + * `AKI_KIDIGO_7D_SQL_CREATININE_DBSOURCE.csv` + * `labstay_DBSOURCE.csv` + * `chart_vitals_stay_DBSOURCE.csv` + * `comorbidities_DBSOURCE.csv` + +### Preprocessing + +The [`preprocess.py`](./preprocess.py) script combines the data from the previous step with the source data files to build a near-final version of the dataset. + +#### Usage + +```bash +python preprocess.py \ + --formatted_path ./formatted \ + --data_path ./data +``` + +#### Arguments + + * `--formatted_path `: The path where the files produced by `parse_db.py` are stored. The default value is set to `./formatted`. + * `--data_path `: The path to the gzipped MIMIC III data. The default value is set to `./data`. + +#### Output + +Results will be saved under the path specified by `FORMATTED_PATH`. The list of files in the directory is expected to be extended with `INFO_DATASET_7days_creatinine.csv`. + +### Data Cleanup + +The [`cleanup.py`](./cleanup.py) script cleans up the data from the previous step and prepares it for analysis. + +#### Usage + +```bash +python cleanup.py \ + --data_path ./formatted/INFO_DATASET_7days_creatinine.csv \ + --formatted_path ./formatted/data.csv +``` + +#### Arguments + + * `--data_path `: The path where the file produced by `preprocess.py` (`INFO_DATASET_7days_creatinine.csv`) is stored.
The default value is set to `./formatted/INFO_DATASET_7days_creatinine.csv`. + * `--formatted_path `: The target path to store the final version of the data, ready for analysis. The default value is set to `./formatted/data.csv`. + +#### Output + +Results will be saved under the path, specified in `FORMATTED_PATH`. The list of files in the directory is expected to be extended by `data.csv`. diff --git a/data/mimiciii/cleanup.py b/data/mimiciii/cleanup.py new file mode 100644 index 0000000..1d2e22a --- /dev/null +++ b/data/mimiciii/cleanup.py @@ -0,0 +1,197 @@ +from argparse import ArgumentParser +import pandas as pd +import os + + +def code_ethnicity(ethinicity: str) -> int: + if ethinicity in { + "UNKNOWN/NOT SPECIFIED", + "OTHER", + "PATIENT DECLINED TO ANSWER", + "MULTI RACE ETHNICITY", + "UNABLE TO OBTAIN", + }: + return -1 + if ethinicity in { + "WHITE", + "WHITE - RUSSIAN", + "WHITE - EASTERN EUROPEAN", + "WHITE - OTHER EUROPEAN", + "WHITE - BRAZILIAN", + }: + return 0 + if ethinicity in { + "BLACK/AFRICAN AMERICAN", + "BLACK/AFRICAN", + "BLACK/HAITIAN", + "BLACK/CAPE VERDEAN", + }: + return 1 + if ethinicity in { + "ASIAN", + "ASIAN - ASIAN INDIAN", + "ASIAN - VIETNAMESE", + "ASIAN - CHINESE", + "ASIAN - FILIPINO", + "ASIAN - CAMBODIAN", + "ASIAN - THAI", + "ASIAN - OTHER", + "ASIAN - KOREAN", + "ASIAN - JAPANESE", + }: + return 2 + if ethinicity in { + "HISPANIC OR LATINO", + "HISPANIC/LATINO - GUATEMALAN", + "HISPANIC/LATINO - PUERTO RICAN", + "HISPANIC/LATINO - DOMINICAN", + "HISPANIC/LATINO - SALVADORAN", + "HISPANIC/LATINO - COLOMBIAN", + "PORTUGUESE", + "HISPANIC/LATINO - CENTRAL AMERICAN (OTHER)", + "HISPANIC/LATINO - HONDURAN", + "HISPANIC/LATINO - CUBAN", + "HISPANIC/LATINO - MEXICAN", + }: + return 3 + if ethinicity in { + "AMERICAN INDIAN/ALASKA NATIVE", + "AMERICAN INDIAN/ALASKA NATIVE FEDERALLY RECOGNIZED TRIBE", + }: + return 4 + if ethinicity in {"CARIBBEAN ISLAND", "NATIVE HAWAIIAN OR OTHER PACIFIC ISLANDER"}: + return 5 + if ethinicity == "SOUTH AMERICAN": + return 6 + if ethinicity == "MIDDLE EASTERN": + return 7 + + raise NotImplementedError() + + +def code_system(system: str) -> int: + if system == "carevue": + return 0 + + if system == "metavision": + return 1 + + raise NotImplementedError() + + +def code_gender(gender: str) -> int: + return int(gender == "M") + + +def cleanup_data(filename: str) -> pd.DataFrame: + # read the data from the CSV + df = pd.read_csv(open(filename, "r"), delimiter=",") + df.columns = map(str.upper, df.columns) + print(df.shape) + + # exclude CKD and AKI on admission patients + df = df[~(df["AKI"] == 2)] + df = df[~(df["AKI"] == 3)] + df = df[~(df["AKI"] == 4)] + + print(df.groupby("AKI")["ICUSTAY_ID"].nunique()) + + # Consider only adults + df = df[~(df["AGE"] < 18)] + + df["ETHNICITY"] = df["ETHNICITY"].apply(lambda x: code_ethnicity(x)) + df["GENDER"] = df["GENDER"].apply(lambda x: code_gender(x)) + + print(df.groupby("ETHNICITY")["ICUSTAY_ID"].nunique()) + + df = df.rename( + columns={ + "HADM_ID_X": "HADM_ID", + "GLUCOSE_MIN_X": "GLUCOSE_MIN", + "GLUCOSE_MAX_X": "GLUCOSE_MAX", + "SUBJECT_ID_Y": "SUBJECT_ID", + "SUBJECT_ID_X.1": "SUBJECT_ID", + "DBSOURCE_Y": "DBSOURCE", + } + ) + df = df.fillna(0) + + df = df.drop(df.columns[1], axis=1) + + print(df.groupby("AKI")["ICUSTAY_ID"].nunique()) + print(df.groupby("AKI_STAGE_7DAY")["ICUSTAY_ID"].nunique()) + + print( + "Non AKI Patients : {}".format( + df.loc[df["AKI_STAGE_7DAY"] == 0]["ICUSTAY_ID"].count() + ) + ) + print( + "AKI patients STAGE 1: {}".format( + 
df.loc[df["AKI_STAGE_7DAY"] == 1]["ICUSTAY_ID"].count() + ) + ) + print( + "AKI Patients STAGE 2: {}".format( + df.loc[df["AKI_STAGE_7DAY"] == 2]["ICUSTAY_ID"].count() + ) + ) + print( + "AKI Patients STAGE 3: {}".format( + df.loc[df["AKI_STAGE_7DAY"] == 3]["ICUSTAY_ID"].count() + ) + ) + print("NAN patients: {}".format(df["AKI"].isna().sum())) + + df = df.drop( + [ + "ADMITTIME", + "DISCHTIME", + "OUTTIME", + "INTIME", + "DOB", + "CHARTTIME_CREAT", + "UNNAMED: 0", + "AKI_STAGE_CREAT", + "AKI_7DAY", + "GLUCOSE_MAX_Y", + "GLUCOSE_MIN_Y", + "DBSOURCE_X", + ], + axis=1, + ) + + if isinstance(df["DBSOURCE"], pd.DataFrame): + df["DBSOURCE_NEW"] = df["DBSOURCE"].iloc[:, 0] + df = df.drop(["DBSOURCE"], axis=1) + df = df.rename(columns={"DBSOURCE_NEW": "DBSOURCE"}) + + df = df[~(df["DBSOURCE"] == "both")] + + df["DBSOURCE"] = df["DBSOURCE"].apply(lambda x: code_system(x)) + + return df + + +def main(): + parser = ArgumentParser() + parser.add_argument( + "--data_path", + type=str, + default="./formatted/INFO_DATASET_7days_creatinine.csv", + help='Path to formatted MIMIC III data from "preprocess.py"', + ) + parser.add_argument( + "--formatted_path", + type=str, + default="./formatted/data.csv", + help="Output path to store cleaned data", + ) + args = parser.parse_args() + + df = cleanup_data(args.data_path) + df.to_csv(args.formatted_path) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/data/mimiciii/parse_db.py b/data/mimiciii/parse_db.py new file mode 100644 index 0000000..81da442 --- /dev/null +++ b/data/mimiciii/parse_db.py @@ -0,0 +1,1020 @@ +import os +import argparse + +import pandas as pd + +import psycopg2 + + +def urine_output(cursor): + + view = "DROP MATERIALIZED VIEW IF EXISTS urineoutput CASCADE; \ + CREATE MATERIALIZED VIEW urineoutput as \ + select oe.icustay_id, oe.charttime \ + , SUM( \ + case when oe.itemid = 227488 then -1*value \ + else value end \ + ) as value \ + from mimiciii.outputevents oe \ + where oe.itemid in \ + ( \ + 40055, \ + 43175, \ + 40069, \ + 40094, \ + 40715, \ + 40473, \ + 40085, \ + 40057, \ + 40056, \ + 40405, \ + 40428, \ + 40086, \ + 40096, \ + 40651, \ + 226559, \ + 226560, \ + 226561, \ + 226584, \ + 226563, \ + 226564, \ + 226565, \ + 226567, \ + 226557, \ + 226558, \ + 227488, \ + 227489 \ + ) \ + and oe.value < 5000 \ + and oe.icustay_id is not null \ + group by icustay_id, charttime;" + + cursor.execute(view) + + +def creatinine(cursor): + + view = "DROP MATERIALIZED VIEW IF EXISTS kdigo_creat CASCADE; \ + CREATE MATERIALIZED VIEW kdigo_creat as \ + with cr as \ + ( \ + select \ + ie.icustay_id \ + , ie.intime, ie.outtime \ + , le.valuenum as creat \ + , le.charttime \ + , ie.DBSOURCE \ + from mimiciii.icustays ie \ + left join mimiciii.labevents le \ + on ie.subject_id = le.subject_id \ + and le.ITEMID = 50912 \ + and le.VALUENUM is not null \ + and le.CHARTTIME between (ie.intime - interval '7' day) and (ie.intime + interval '7' day) \ + ) \ + SELECT \ + cr.icustay_id \ + , cr.charttime \ + , cr.creat \ + , MIN(cr48.creat) AS creat_low_past_48hr \ + , MIN(cr7.creat) AS creat_low_past_7day \ + FROM cr \ + LEFT JOIN cr cr48 \ + ON cr.icustay_id = cr48.icustay_id \ + AND cr48.charttime < cr.charttime \ + AND cr48.charttime >= (cr.charttime - INTERVAL '48' HOUR) \ + LEFT JOIN cr cr7 \ + ON cr.icustay_id = cr7.icustay_id \ + AND cr7.charttime < cr.charttime \ + AND cr7.charttime >= (cr.charttime - INTERVAL '7' DAY) \ + GROUP BY cr.icustay_id, cr.charttime, cr.creat \ + ORDER BY cr.icustay_id, cr.charttime, cr.creat;" 
+ + cursor.execute(view) + + +def echo_data(cursor): + + # -- This code extracts structured data from echocardiographies + # -- You can join it to the text notes using ROW_ID + # -- Just note that ROW_ID will differ across versions of MIMIC-III. + + view = "DROP MATERIALIZED VIEW IF EXISTS echodata CASCADE; \ + CREATE MATERIALIZED VIEW echodata AS \ + select ROW_ID \ + , subject_id, hadm_id \ + , chartdate \ + , cast(to_timestamp( (to_char( chartdate, 'DD-MM-YYYY' ) || substring(ne.text, 'Date/Time: [\[\]0-9*-]+ at ([0-9:]+)')), \ + 'DD-MM-YYYYHH24:MI') as timestamp without time zone) \ + as charttime \ + , substring(ne.text, 'Indication: (.*?)\n') as Indication \ + , case \ + when substring(ne.text, 'Height: \(in\) (.*?)\n') like '%*%' \ + then null \ + else cast(substring(ne.text, 'Height: \(in\) (.*?)\n') as numeric) \ + end as Height \ + , case \ + when substring(ne.text, 'Weight \(lb\): (.*?)\n') like '%*%' \ + then null \ + else cast(substring(ne.text, 'Weight \(lb\): (.*?)\n') as numeric) \ + end as Weight \ + , case \ + when substring(ne.text, 'BSA \(m2\): (.*?) m2\n') like '%*%' \ + then null \ + else cast(substring(ne.text, 'BSA \(m2\): (.*?) m2\n') as numeric) \ + end as BSA \ + , substring(ne.text, 'BP \(mm Hg\): (.*?)\n') as BP \ + , case \ + when substring(ne.text, 'BP \(mm Hg\): ([0-9]+)/[0-9]+?\n') like '%*%' \ + then null \ + else cast(substring(ne.text, 'BP \(mm Hg\): ([0-9]+)/[0-9]+?\n') as numeric) \ + end as BPSys \ + , case \ + when substring(ne.text, 'BP \(mm Hg\): [0-9]+/([0-9]+?)\n') like '%*%' \ + then null \ + else cast(substring(ne.text, 'BP \(mm Hg\): [0-9]+/([0-9]+?)\n') as numeric) \ + end as BPDias \ + , case \ + when substring(ne.text, 'HR \(bpm\): ([0-9]+?)\n') like '%*%' \ + then null \ + else cast(substring(ne.text, 'HR \(bpm\): ([0-9]+?)\n') as numeric) \ + end as HR \ + , substring(ne.text, 'Status: (.*?)\n') as Status \ + , substring(ne.text, 'Test: (.*?)\n') as Test \ + , substring(ne.text, 'Doppler: (.*?)\n') as Doppler \ + , substring(ne.text, 'Contrast: (.*?)\n') as Contrast \ + , substring(ne.text, 'Technical Quality: (.*?)\n') as TechnicalQuality \ + from mimiciii.noteevents ne \ + where category = 'Echo';" + + cursor.execute(view) + + +def weight_duration(cursor): + + # -- This query extracts weights for ICU patients with start/stop times + # -- if only an admission weight is given, then this is assigned from intime to outtime + + view = " DROP MATERIALIZED VIEW IF EXISTS weightdurations CASCADE; \ + CREATE MATERIALIZED VIEW weightdurations as \ + WITH wt_neonate AS \ + (\ + SELECT c.icustay_id, c.charttime \ + , MAX(CASE WHEN c.itemid = 3580 THEN c.valuenum END) as wt_kg \ + , MAX(CASE WHEN c.itemid = 3581 THEN c.valuenum END) as wt_lb \ + , MAX(CASE WHEN c.itemid = 3582 THEN c.valuenum END) as wt_oz \ + FROM mimiciii.chartevents c \ + WHERE c.itemid in (3580, 3581, 3582) \ + AND c.icustay_id IS NOT NULL \ + AND c.error IS DISTINCT FROM 1 \ + AND c.valuenum > 0 \ + GROUP BY c.icustay_id, c.charttime \ + ) \ + , birth_wt AS \ + ( \ + SELECT c.icustay_id, c.charttime \ + , MAX( \ + CASE \ + WHEN c.itemid = 4183 THEN \ + CASE \ + WHEN c.value ~ '[^0-9\.]' THEN NULL \ + WHEN CAST(c.value AS NUMERIC) > 100 THEN CAST(c.value AS NUMERIC)/1000 \ + WHEN CAST(c.value AS NUMERIC) < 10 THEN CAST(c.value AS NUMERIC) \ + ELSE NULL END \ + WHEN c.itemid = 3723 AND c.valuenum < 10 THEN c.valuenum \ + ELSE NULL END) as wt_kg \ + FROM mimiciii.chartevents c \ + WHERE c.itemid in (3723, 4183) \ + AND c.icustay_id IS NOT NULL \ + AND c.error IS DISTINCT FROM 1 \ 
+ GROUP BY c.icustay_id, c.charttime \ + ) \ + , wt_stg as \ + ( \ + SELECT \ + c.icustay_id \ + , c.charttime \ + , case when c.itemid in (762,226512) then 'admit' \ + else 'daily' end as weight_type \ + , c.valuenum as weight \ + FROM mimiciii.chartevents c \ + WHERE c.valuenum IS NOT NULL \ + AND c.itemid in \ + ( \ + 762,226512 \ + , 763,224639 \ + ) \ + AND c.icustay_id IS NOT NULL \ + AND c.valuenum > 0 \ + AND c.error IS DISTINCT FROM 1 \ + UNION ALL \ + SELECT \ + n.icustay_id \ + , n.charttime \ + , 'daily' AS weight_type \ + , CASE \ + WHEN wt_kg IS NOT NULL THEN wt_kg \ + WHEN wt_lb IS NOT NULL THEN wt_lb*0.45359237 + wt_oz*0.0283495231 \ + ELSE NULL END AS weight \ + FROM wt_neonate n \ + UNION ALL \ + SELECT \ + b.icustay_id \ + , b.charttime \ + , 'admit' AS weight_type \ + , wt_kg as weight \ + FROM birth_wt b \ + ) \ + , wt_stg1 as \ + ( \ + select \ + icustay_id \ + , charttime \ + , weight_type \ + , weight \ + , ROW_NUMBER() OVER (partition by icustay_id, weight_type order by charttime) as rn \ + from wt_stg \ + WHERE weight IS NOT NULL \ + ) \ + , wt_stg2 AS \ + ( \ + SELECT \ + wt_stg1.icustay_id \ + , ie.intime, ie.outtime \ + , ie.DBSOURCE \ + , case when wt_stg1.weight_type = 'admit' and wt_stg1.rn = 1 \ + then ie.intime - interval '2' hour \ + else wt_stg1.charttime end as starttime \ + , wt_stg1.weight \ + from wt_stg1 \ + INNER JOIN mimiciii.icustays ie \ + on ie.icustay_id = wt_stg1.icustay_id \ + ) \ + , wt_stg3 as \ + ( \ + select \ + icustay_id \ + , intime, outtime \ + , starttime \ + , DBSOURCE \ + , coalesce( \ + LEAD(starttime) OVER (PARTITION BY icustay_id ORDER BY starttime), \ + outtime + interval '2' hour \ + ) as endtime \ + , weight \ + from wt_stg2 \ + ) \ + , wt1 as \ + ( \ + select \ + icustay_id \ + , starttime \ + , DBSOURCE \ + , coalesce(endtime, \ + LEAD(starttime) OVER (partition by icustay_id order by starttime), \ + outtime + interval '2' hour) \ + as endtime \ + , weight \ + from wt_stg3 \ + ) \ + , wt_fix as \ + ( \ + select ie.icustay_id \ + , ie.intime - interval '2' hour as starttime \ + , wt.starttime as endtime \ + , wt.weight \ + from mimiciii.icustays ie \ + inner join \ + ( \ + SELECT wt1.icustay_id, wt1.starttime, wt1.weight , wt1.DBSOURCE \ + , ROW_NUMBER() OVER (PARTITION BY wt1.icustay_id ORDER BY wt1.starttime) as rn \ + FROM wt1 \ + ) wt \ + ON ie.icustay_id = wt.icustay_id \ + AND wt.rn = 1 \ + and ie.intime < wt.starttime \ + ) \ + , wt2 as \ + ( \ + select \ + wt1.icustay_id \ + , wt1.starttime \ + , wt1.endtime \ + , wt1.weight \ + from wt1 \ + UNION \ + SELECT \ + wt_fix.icustay_id \ + , wt_fix.starttime \ + , wt_fix.endtime \ + , wt_fix.weight \ + from wt_fix \ + ) \ + , echo_lag as \ + ( \ + select \ + ie.icustay_id \ + , ie.intime, ie.outtime \ + , 0.453592*ec.weight as weight_echo \ + , ROW_NUMBER() OVER (PARTITION BY ie.icustay_id ORDER BY ec.charttime) as rn \ + , ec.charttime as starttime \ + , LEAD(ec.charttime) OVER (PARTITION BY ie.icustay_id ORDER BY ec.charttime) as endtime \ + from mimiciii.icustays ie \ + inner join echodata ec \ + on ie.hadm_id = ec.hadm_id \ + where ec.weight is not null \ + ) \ + , echo_final as \ + ( \ + select \ + el.icustay_id \ + , el.starttime \ + , coalesce(el.endtime, el.outtime + interval '2' hour) as endtime \ + , weight_echo \ + from echo_lag el \ + UNION \ + select \ + el.icustay_id \ + , el.intime - interval '2' hour as starttime \ + , el.starttime as endtime \ + , el.weight_echo \ + from echo_lag el \ + where el.rn = 1 \ + and el.starttime > el.intime - interval '2' hour 
\ + ) \ + select \ + wt2.icustay_id, wt2.starttime, wt2.endtime, wt2.weight\ + from wt2 \ + UNION \ + select \ + ef.icustay_id, ef.starttime, ef.endtime, ef.weight_echo as weight \ + from echo_final ef \ + where ef.icustay_id not in (select distinct icustay_id from wt2) \ + order by icustay_id, starttime, endtime;" + + cursor.execute(view) + + +def urine_kidigo(cursor): + + # -- we have joined each row to all rows preceding within 24 hours \ + # -- we can now sum these rows to get total UO over the last 24 hours \ + # -- we can use case statements to restrict it to only the last 6/12 hours \ + # -- therefore we have three sums: \ + # -- 1) over a 6 hour period \ + # -- 2) over a 12 hour period \ + # -- 3) over a 24 hour period \ + # -- note that we assume data charted at charttime corresponds to 1 hour of UO \ + # -- therefore we use '5' and '11' to restrict the period, rather than 6/12 \ + # -- this assumption may overestimate UO rate when documentation is done less than hourly \ + # -- 6 hours \ + + view = " DROP MATERIALIZED VIEW IF EXISTS kdigo_uo CASCADE; \ + CREATE MATERIALIZED VIEW kdigo_uo AS \ + with ur_stg as \ + ( \ + select io.icustay_id, io.charttime \ + , sum(case when io.charttime <= iosum.charttime + interval '5' hour \ + then iosum.VALUE \ + else null end) as UrineOutput_6hr \ + , sum(case when io.charttime <= iosum.charttime + interval '11' hour \ + then iosum.VALUE \ + else null end) as UrineOutput_12hr \ + , sum(iosum.VALUE) as UrineOutput_24hr \ + , ROUND(CAST(EXTRACT(EPOCH FROM \ + io.charttime - \ + MIN(case when io.charttime <= iosum.charttime + interval '5' hour \ + then iosum.charttime \ + else null end) \ + )/3600.0 AS NUMERIC), 4) AS uo_tm_6hr \ + , ROUND(CAST(EXTRACT(EPOCH FROM \ + io.charttime - \ + MIN(case when io.charttime <= iosum.charttime + interval '11' hour \ + then iosum.charttime \ + else null end) \ + )/3600.0 AS NUMERIC), 4) AS uo_tm_12hr \ + , ROUND(CAST(EXTRACT(EPOCH FROM \ + io.charttime - MIN(iosum.charttime) \ + )/3600.0 AS NUMERIC), 4) AS uo_tm_24hr \ + from urineoutput io \ + left join urineoutput iosum \ + on io.icustay_id = iosum.icustay_id \ + and io.charttime >= iosum.charttime \ + and io.charttime <= (iosum.charttime + interval '23' hour) \ + group by io.icustay_id, io.charttime \ + ) \ + select \ + ur.icustay_id \ + , ur.charttime \ + , wd.weight \ + , ur.UrineOutput_6hr \ + , ur.UrineOutput_12hr \ + , ur.UrineOutput_24hr \ + , ROUND((ur.UrineOutput_6hr/wd.weight/(uo_tm_6hr+1))::NUMERIC, 4) AS uo_rt_6hr \ + , ROUND((ur.UrineOutput_12hr/wd.weight/(uo_tm_12hr+1))::NUMERIC, 4) AS uo_rt_12hr \ + , ROUND((ur.UrineOutput_24hr/wd.weight/(uo_tm_24hr+1))::NUMERIC, 4) AS uo_rt_24hr \ + , uo_tm_6hr \ + , uo_tm_12hr \ + , uo_tm_24hr \ + from ur_stg ur \ + left join weightdurations wd \ + on ur.icustay_id = wd.icustay_id \ + and ur.charttime >= wd.starttime \ + and ur.charttime < wd.endtime \ + order by icustay_id, charttime; " + + cursor.execute(view) + + +def kidigo_7_days_creatinine(cursor): + + # -- This query checks if the patient had AKI during the first 7 days of their ICU + # -- stay according to the KDIGO guideline. 
+ # -- https://kdigo.org/wp-content/uploads/2016/10/KDIGO-2012-AKI-Guideline-English.pdf + + view = "DROP MATERIALIZED VIEW IF EXISTS kdigo_7_days_creatinine; \ + CREATE MATERIALIZED VIEW kdigo_7_days_creatinine AS \ + WITH cr_aki AS \ + ( \ + SELECT \ + k.icustay_id \ + , k.DBSOURCE \ + , k.charttime \ + , k.creat \ + , k.aki_stage_creat \ + , ROW_NUMBER() OVER (PARTITION BY k.icustay_id ORDER BY k.aki_stage_creat DESC, k.creat DESC) AS rn \ + FROM mimiciii.icustays ie \ + INNER JOIN kdigo_stages_creatinine k \ + ON ie.icustay_id = k.icustay_id \ + WHERE k.charttime > (ie.intime - interval '6' hour) \ + AND k.charttime <= (ie.intime + interval '7' day) \ + AND k.aki_stage_creat IS NOT NULL \ + ) \ + select \ + ie.icustay_id \ + , ie.DBSOURCE \ + , cr.charttime as charttime_creat \ + , cr.creat \ + , cr.aki_stage_creat \ + , cr.aki_stage_creat AS aki_stage_7day \ + , CASE WHEN (cr.aki_stage_creat > 0) THEN 1 ELSE 0 END AS aki_7day \ + FROM mimiciii.icustays ie \ + LEFT JOIN cr_aki cr \ + ON ie.icustay_id = cr.icustay_id \ + AND cr.rn = 1 \ + order by ie.icustay_id; " + + cursor.execute(view) + + +def kidigo_stages_creatinine(cursor): + + # -- This query checks if the patient had AKI according to KDIGO. + # -- AKI is calculated every time a creatinine or urine output measurement occurs. + # -- Baseline creatinine is defined as the lowest creatinine in the past 7 days. + + view = " DROP MATERIALIZED VIEW IF EXISTS kdigo_stages_creatinine CASCADE; \ + CREATE MATERIALIZED VIEW kdigo_stages_creatinine AS \ + with cr_stg AS \ + ( \ + SELECT \ + cr.icustay_id \ + , cr.charttime \ + , cr.creat \ + , case \ + when cr.creat >= (cr.creat_low_past_7day*3.0) then 3 \ + when cr.creat >= 4 \ + and (cr.creat_low_past_48hr <= 3.7 OR cr.creat >= (1.5*cr.creat_low_past_7day)) \ + then 3 \ + when cr.creat >= (cr.creat_low_past_7day*2.0) then 2 \ + when cr.creat >= (cr.creat_low_past_48hr+0.3) then 1 \ + when cr.creat >= (cr.creat_low_past_7day*1.5) then 1 \ + else 0 end as aki_stage_creat \ + FROM kdigo_creat cr \ + ) \ + , tm_stg AS \ + ( \ + SELECT \ + icustay_id, charttime \ + FROM cr_stg \ + ) \ + select \ + ie.icustay_id \ + , ie.DBSOURCE \ + , tm.charttime \ + , cr.creat \ + , cr.aki_stage_creat \ + , cr.aki_stage_creat AS aki_stage \ + FROM mimiciii.icustays ie \ + LEFT JOIN tm_stg tm \ + ON ie.icustay_id = tm.icustay_id \ + LEFT JOIN cr_stg cr \ + ON ie.icustay_id = cr.icustay_id \ + AND tm.charttime = cr.charttime \ + order by ie.icustay_id, tm.charttime; " + + cursor.execute(view) + + +def get_labevents(cursor): + + # -- This query pivots lab values taken during the 7 first days of a patient's stay + # -- Have already confirmed that the unit of measurement is always the same: null or the correct unit + + # -- Extract all bicarbonate, blood urea nitrogen (BUN), calcium, chloride, creatinine, + # hemoglobin, international normalized ratio (INR), platelet, potassium, prothrombin time (PT), + # partial throm- boplastin time (PTT), and white blood count (WBC) values from labevents around patient's ICU stay + + view = "DROP MATERIALIZED VIEW IF EXISTS labstay CASCADE; \ + CREATE materialized VIEW labstay AS \ + SELECT \ + pvt.subject_id, pvt.hadm_id, pvt.icustay_id , pvt.DBSOURCE \ + , min(CASE WHEN label = 'ANION GAP' THEN valuenum ELSE null END) as ANIONGAP_min \ + , max(CASE WHEN label = 'ANION GAP' THEN valuenum ELSE null END) as ANIONGAP_max \ + , min(CASE WHEN label = 'ALBUMIN' THEN valuenum ELSE null END) as ALBUMIN_min \ + , max(CASE WHEN label = 'ALBUMIN' THEN valuenum ELSE null END) as 
ALBUMIN_max \ + , min(CASE WHEN label = 'BANDS' THEN valuenum ELSE null END) as BANDS_min \ + , max(CASE WHEN label = 'BANDS' THEN valuenum ELSE null END) as BANDS_max \ + , min(CASE WHEN label = 'BICARBONATE' THEN valuenum ELSE null END) as BICARBONATE_min \ + , max(CASE WHEN label = 'BICARBONATE' THEN valuenum ELSE null END) as BICARBONATE_max \ + , min(CASE WHEN label = 'BILIRUBIN' THEN valuenum ELSE null END) as BILIRUBIN_min \ + , max(CASE WHEN label = 'BILIRUBIN' THEN valuenum ELSE null END) as BILIRUBIN_max \ + , min(CASE WHEN label = 'CREATININE' THEN valuenum ELSE null END) as CREATININE_min \ + , max(CASE WHEN label = 'CREATININE' THEN valuenum ELSE null END) as CREATININE_max \ + , min(CASE WHEN label = 'CHLORIDE' THEN valuenum ELSE null END) as CHLORIDE_min \ + , max(CASE WHEN label = 'CHLORIDE' THEN valuenum ELSE null END) as CHLORIDE_max \ + , min(CASE WHEN label = 'GLUCOSE' THEN valuenum ELSE null END) as GLUCOSE_min \ + , max(CASE WHEN label = 'GLUCOSE' THEN valuenum ELSE null END) as GLUCOSE_max \ + , min(CASE WHEN label = 'HEMATOCRIT' THEN valuenum ELSE null END) as HEMATOCRIT_min \ + , max(CASE WHEN label = 'HEMATOCRIT' THEN valuenum ELSE null END) as HEMATOCRIT_max \ + , min(CASE WHEN label = 'HEMOGLOBIN' THEN valuenum ELSE null END) as HEMOGLOBIN_min \ + , max(CASE WHEN label = 'HEMOGLOBIN' THEN valuenum ELSE null END) as HEMOGLOBIN_max \ + , min(CASE WHEN label = 'LACTATE' THEN valuenum ELSE null END) as LACTATE_min \ + , max(CASE WHEN label = 'LACTATE' THEN valuenum ELSE null END) as LACTATE_max \ + , min(CASE WHEN label = 'PLATELET' THEN valuenum ELSE null END) as PLATELET_min \ + , max(CASE WHEN label = 'PLATELET' THEN valuenum ELSE null END) as PLATELET_max \ + , min(CASE WHEN label = 'POTASSIUM' THEN valuenum ELSE null END) as POTASSIUM_min \ + , max(CASE WHEN label = 'POTASSIUM' THEN valuenum ELSE null END) as POTASSIUM_max \ + , min(CASE WHEN label = 'PTT' THEN valuenum ELSE null END) as PTT_min \ + , max(CASE WHEN label = 'PTT' THEN valuenum ELSE null END) as PTT_max \ + , min(CASE WHEN label = 'INR' THEN valuenum ELSE null END) as INR_min \ + , max(CASE WHEN label = 'INR' THEN valuenum ELSE null END) as INR_max \ + , min(CASE WHEN label = 'PT' THEN valuenum ELSE null END) as PT_min \ + , max(CASE WHEN label = 'PT' THEN valuenum ELSE null END) as PT_max \ + , min(CASE WHEN label = 'SODIUM' THEN valuenum ELSE null END) as SODIUM_min \ + , max(CASE WHEN label = 'SODIUM' THEN valuenum ELSE null end) as SODIUM_max \ + , min(CASE WHEN label = 'BUN' THEN valuenum ELSE null end) as BUN_min \ + , max(CASE WHEN label = 'BUN' THEN valuenum ELSE null end) as BUN_max \ + , min(CASE WHEN label = 'WBC' THEN valuenum ELSE null end) as WBC_min \ + , max(CASE WHEN label = 'WBC' THEN valuenum ELSE null end) as WBC_max \ + FROM \ + ( SELECT ie.subject_id, ie.hadm_id, ie.icustay_id ,ie.DBSOURCE \ + , CASE \ + WHEN itemid = 50868 THEN 'ANION GAP' \ + WHEN itemid = 50862 THEN 'ALBUMIN' \ + WHEN itemid = 51144 THEN 'BANDS' \ + WHEN itemid = 50882 THEN 'BICARBONATE' \ + WHEN itemid = 50885 THEN 'BILIRUBIN' \ + WHEN itemid = 50912 THEN 'CREATININE' \ + WHEN itemid = 50806 THEN 'CHLORIDE' \ + WHEN itemid = 50902 THEN 'CHLORIDE' \ + WHEN itemid = 50809 THEN 'GLUCOSE' \ + WHEN itemid = 50931 THEN 'GLUCOSE' \ + WHEN itemid = 50810 THEN 'HEMATOCRIT' \ + WHEN itemid = 51221 THEN 'HEMATOCRIT' \ + WHEN itemid = 50811 THEN 'HEMOGLOBIN' \ + WHEN itemid = 51222 THEN 'HEMOGLOBIN' \ + WHEN itemid = 50813 THEN 'LACTATE' \ + WHEN itemid = 51265 THEN 'PLATELET' \ + WHEN itemid = 50822 THEN 
'POTASSIUM' \ + WHEN itemid = 50971 THEN 'POTASSIUM' \ + WHEN itemid = 51275 THEN 'PTT' \ + WHEN itemid = 51237 THEN 'INR' \ + WHEN itemid = 51274 THEN 'PT' \ + WHEN itemid = 50824 THEN 'SODIUM' \ + WHEN itemid = 50983 THEN 'SODIUM' \ + WHEN itemid = 51006 THEN 'BUN' \ + WHEN itemid = 51300 THEN 'WBC' \ + WHEN itemid = 51301 THEN 'WBC' \ + ELSE null \ + END AS label \ + , CASE \ + WHEN itemid = 50862 and valuenum > 10 THEN null \ + WHEN itemid = 50868 and valuenum > 10000 THEN null \ + WHEN itemid = 51144 and valuenum < 0 THEN null \ + WHEN itemid = 51144 and valuenum > 100 THEN null \ + WHEN itemid = 50882 and valuenum > 10000 THEN null \ + WHEN itemid = 50885 and valuenum > 150 THEN null \ + WHEN itemid = 50806 and valuenum > 10000 THEN null \ + WHEN itemid = 50902 and valuenum > 10000 THEN null \ + WHEN itemid = 50912 and valuenum > 150 THEN null \ + WHEN itemid = 50809 and valuenum > 10000 THEN null \ + WHEN itemid = 50931 and valuenum > 10000 THEN null \ + WHEN itemid = 50810 and valuenum > 100 THEN null \ + WHEN itemid = 51221 and valuenum > 100 THEN null \ + WHEN itemid = 50811 and valuenum > 50 THEN null \ + WHEN itemid = 51222 and valuenum > 50 THEN null \ + WHEN itemid = 50813 and valuenum > 50 THEN null \ + WHEN itemid = 51265 and valuenum > 10000 THEN null \ + WHEN itemid = 50822 and valuenum > 30 THEN null \ + WHEN itemid = 50971 and valuenum > 30 THEN null \ + WHEN itemid = 51275 and valuenum > 150 THEN null \ + WHEN itemid = 51237 and valuenum > 50 THEN null \ + WHEN itemid = 51274 and valuenum > 150 THEN null \ + WHEN itemid = 50824 and valuenum > 200 THEN null \ + WHEN itemid = 50983 and valuenum > 200 THEN null \ + WHEN itemid = 51006 and valuenum > 300 THEN null \ + WHEN itemid = 51300 and valuenum > 1000 THEN null \ + WHEN itemid = 51301 and valuenum > 1000 THEN null \ + ELSE le.valuenum \ + END AS valuenum \ + FROM mimiciii.icustays ie \ + LEFT JOIN mimiciii.labevents le \ + ON le.subject_id = ie.subject_id AND le.hadm_id = ie.hadm_id \ + AND le.CHARTTIME between (ie.intime - interval '6' hour) and (ie.intime + interval '7' day)\ + AND le.ITEMID in \ + ( \ + 50868, \ + 50862, \ + 51144, \ + 50882, \ + 50885, \ + 50912, \ + 50902, \ + 50806, \ + 50931, \ + 50809, \ + 51221, \ + 50810, \ + 51222, \ + 50811, \ + 50813, \ + 51265, \ + 50971, \ + 50822, \ + 51275, \ + 51237, \ + 51274, \ + 50983, \ + 50824, \ + 51006, \ + 51301, \ + 51300 \ + ) \ + AND valuenum IS NOT null AND valuenum > 0 \ + ) pvt \ + GROUP BY pvt.subject_id, pvt.hadm_id, pvt.icustay_id , pvt.DBSOURCE \ + ORDER BY pvt.subject_id, pvt.hadm_id, pvt.icustay_id, pvt.DBSOURCE;" + + cursor.execute(view) + + +def get_vitals_chart(cursor): + + # -- This query pivots the vital signs during the first 7 days of a patient's stay + # -- Vital signs include heart rate, blood pressure, respiration rate, and temperature + + view = "DROP MATERIALIZED VIEW IF EXISTS vitalsfirstday CASCADE; \ + create materialized view vitalsfirstday as \ + SELECT pvt.subject_id, pvt.hadm_id, pvt.icustay_id, pvt.DBSOURCE \ + , min(case when VitalID = 1 then valuenum else null end) as HeartRate_Min \ + , max(case when VitalID = 1 then valuenum else null end) as HeartRate_Max \ + , avg(case when VitalID = 1 then valuenum else null end) as HeartRate_Mean \ + , min(case when VitalID = 2 then valuenum else null end) as SysBP_Min \ + , max(case when VitalID = 2 then valuenum else null end) as SysBP_Max \ + , avg(case when VitalID = 2 then valuenum else null end) as SysBP_Mean \ + , min(case when VitalID = 3 then valuenum else null end) as 
DiasBP_Min \ + , max(case when VitalID = 3 then valuenum else null end) as DiasBP_Max \ + , avg(case when VitalID = 3 then valuenum else null end) as DiasBP_Mean \ + , min(case when VitalID = 4 then valuenum else null end) as MeanBP_Min \ + , max(case when VitalID = 4 then valuenum else null end) as MeanBP_Max \ + , avg(case when VitalID = 4 then valuenum else null end) as MeanBP_Mean \ + , min(case when VitalID = 5 then valuenum else null end) as RespRate_Min \ + , max(case when VitalID = 5 then valuenum else null end) as RespRate_Max \ + , avg(case when VitalID = 5 then valuenum else null end) as RespRate_Mean \ + , min(case when VitalID = 6 then valuenum else null end) as TempC_Min \ + , max(case when VitalID = 6 then valuenum else null end) as TempC_Max \ + , avg(case when VitalID = 6 then valuenum else null end) as TempC_Mean \ + , min(case when VitalID = 7 then valuenum else null end) as SpO2_Min \ + , max(case when VitalID = 7 then valuenum else null end) as SpO2_Max \ + , avg(case when VitalID = 7 then valuenum else null end) as SpO2_Mean \ + , min(case when VitalID = 8 then valuenum else null end) as Glucose_Min \ + , max(case when VitalID = 8 then valuenum else null end) as Glucose_Max \ + , avg(case when VitalID = 8 then valuenum else null end) as Glucose_Mean \ + FROM ( \ + select ie.subject_id, ie.hadm_id, ie.icustay_id, ie.DBSOURCE\ + , case \ + when itemid in (211,220045) and valuenum > 0 and valuenum < 300 then 1 \ + when itemid in (51,442,455,6701,220179,220050) and valuenum > 0 and valuenum < 400 then 2 \ + when itemid in (8368,8440,8441,8555,220180,220051) and valuenum > 0 and valuenum < 300 then 3 \ + when itemid in (456,52,6702,443,220052,220181,225312) and valuenum > 0 and valuenum < 300 then 4 \ + when itemid in (615,618,220210,224690) and valuenum > 0 and valuenum < 70 then 5 \ + when itemid in (223761,678) and valuenum > 70 and valuenum < 120 then 6 \ + when itemid in (223762,676) and valuenum > 10 and valuenum < 50 then 6 \ + when itemid in (646,220277) and valuenum > 0 and valuenum <= 100 then 7 \ + when itemid in (807,811,1529,3745,3744,225664,220621,226537) and valuenum > 0 then 8 \ + else null end as VitalID \ + , case when itemid in (223761,678) then (valuenum-32)/1.8 else valuenum end as valuenum \ + from mimiciii.icustays ie \ + left join mimiciii.chartevents ce \ + on ie.subject_id = ce.subject_id and ie.hadm_id = ce.hadm_id and ie.icustay_id = ce.icustay_id \ + and ce.charttime between ie.intime - interval '6' hour and ie.intime + interval '7' day \ + and ce.error IS DISTINCT FROM 1 \ + where ce.itemid in \ + ( \ + 211, \ + 220045, \ + 51, \ + 442, \ + 455, \ + 6701, \ + 220179, \ + 220050, \ + 8368, \ + 8440, \ + 8441, \ + 8555, \ + 220180, \ + 220051, \ + 456, \ + 52, \ + 6702, \ + 443, \ + 220052, \ + 220181, \ + 225312, \ + 618, \ + 615, \ + 220210, \ + 224690, \ + 646, 220277, \ + 807, \ + 811, \ + 1529, \ + 3745, \ + 3744, \ + 225664, \ + 220621, \ + 226537, \ + 223762, \ + 676, \ + 223761, \ + 678 \ + ) \ + ) pvt \ + group by pvt.subject_id, pvt.hadm_id, pvt.icustay_id, pvt.DBSOURCE \ + order by pvt.subject_id, pvt.hadm_id, pvt.icustay_id, pvt.DBSOURCE;" + + cursor.execute(view) + + +def get_comorbidities(cursor): + + view = "DROP MATERIALIZED VIEW IF EXISTS COMORBIDITIES CASCADE; \ + CREATE MATERIALIZED VIEW COMORBIDITIES AS \ + with icd as \ + ( \ + select hadm_id, seq_num, icd9_code \ + from mimiciii.diagnoses_icd \ + where seq_num != 1 \ + ) \ + , eliflg as \ + (\ + select hadm_id, seq_num, icd9_code\ + , CASE\ + when icd9_code in 
('39891','40201','40211','40291','40401','40403','40411','40413','40491','40493') then 1\ + when SUBSTRING(icd9_code FROM 1 for 4) in ('4254','4255','4257','4258','4259') then 1\ + when SUBSTRING(icd9_code FROM 1 for 3) in ('428') then 1\ + else 0 end as CHF\ + , CASE \ + when icd9_code in ('42613','42610','42612','99601','99604') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('4260','4267','4269','4270','4271','4272','4273','4274','4276','4278','4279','7850','V450','V533') then 1 \ + else 0 end as ARRHY \ + , CASE \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('0932','7463','7464','7465','7466','V422','V433') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 3) in ('394','395','396','397','424') then 1 \ + else 0 end as VALVE \ + , CASE \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('0930','4373','4431','4432','4438','4439','4471','5571','5579','V434') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 3) in ('440','441') then 1 \ + else 0 end as PERIVASC \ + , CASE \ + when SUBSTRING(icd9_code FROM 1 for 3) in ('401') then 1 \ + else 0 end as HTN \ + , CASE \ + when SUBSTRING(icd9_code FROM 1 for 3) in ('402','403','404','405') then 1 \ + else 0 end as HTNCX \ + , CASE \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('2500','2501','2502','2503') then 1 \ + else 0 end as DM \ + , CASE \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('2504','2505','2506','2507','2508','2509') then 1 \ + else 0 end as DMCX \ + , CASE \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('2409','2461','2468') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 3) in ('243','244') then 1 \ + else 0 end as HYPOTHY \ + , CASE \ + when icd9_code in ('40301','40311','40391','40402','40403','40412','40413','40492','40493') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('5880','V420','V451') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 3) in ('585','586','V56') then 1 \ + else 0 end as RENLFAIL \ + , CASE \ + when icd9_code in ('07022','07023','07032','07033','07044','07054') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('0706','0709','4560','4561','4562','5722','5723','5724','5728','5733','5734','5738','5739','V427') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 3) in ('570','571') then 1 \ + else 0 end as LIVER \ + , CASE \ + when icd9_code in ('72889','72930') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('7010','7100','7101','7102','7103','7104','7108','7109','7112','7193','7285') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 3) in ('446','714','720','725') then 1 \ + else 0 end as ARTH \ + , CASE \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('2871','2873','2874','2875') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 3) in ('286') then 1 \ + else 0 end as COAG \ + , CASE \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('2780') then 1 \ + else 0 end as OBESE \ + , CASE \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('2536') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 3) in ('276') then 1 \ + else 0 end as LYTES \ + , CASE \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('2652','2911','2912','2913','2915','2918','2919','3030','3039','3050','3575','4255','5353','5710','5711','5712','5713','V113') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 3) in ('980') then 1 \ + else 0 end as ALCOHOL \ + , CASE \ + when icd9_code in ('V6542') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 4) in ('3052','3053','3054','3055','3056','3057','3058','3059') then 1 \ + when SUBSTRING(icd9_code FROM 1 for 3) in ('292','304') then 1 \ + else 0 end as DRUG \ + from icd \ + )\ + , eligrp as \ + ( \ + select hadm_id \ + , 
max(chf) as chf \ + , max(arrhy) as arrhy \ + , max(valve) as valve \ + , max(perivasc) as perivasc \ + , max(htn) as htn \ + , max(htncx) as htncx \ + , max(renlfail) as renlfail \ + , max(dm) as dm \ + , max(dmcx) as dmcx \ + , max(hypothy) as hypothy \ + , max(liver) as liver \ + , max(arth) as arth \ + , max(coag) as coag \ + , max(obese) as obese \ + , max(lytes) as lytes \ + , max(alcohol) as alcohol \ + , max(drug) as drug \ + from eliflg \ + group by hadm_id \ + ) \ + select adm.hadm_id \ + , chf as CONGESTIVE_HEART_FAILURE \ + , arrhy as CARDIAC_ARRHYTHMIAS \ + , valve as VALVULAR_DISEASE \ + , perivasc as PERIPHERAL_VASCULAR \ + , renlfail as RENAL_FAILURE \ + , case \ + when htn = 1 then 1 \ + when htncx = 1 then 1 \ + else 0 end as HYPERTENSION \ + , case \ + when dmcx = 1 then 0 \ + when dm = 1 then 1 \ + else 0 end as DIABETES_UNCOMPLICATED \ + , dmcx as DIABETES_COMPLICATED \ + , hypothy as HYPOTHYROIDISM \ + , liver as LIVER_DISEASE \ + , obese as OBESITY \ + , alcohol as ALCOHOL_ABUSE \ + , drug as DRUG_ABUSE \ + from mimiciii.admissions adm \ + left join eligrp eli \ + on adm.hadm_id = eli.hadm_id \ + order by adm.hadm_id;" + + cursor.execute(view) + + +def count_icustays(cursor): + query = "select * from mimiciii.icustays" + cursor.execute(query) + + rows = cursor.fetchall() + print(len(rows)) + + +def preprocess(db: str, host: str, user: str, password: str, output_path: str): + os.makedirs(output_path, exist_ok=True) + + try: + conn = psycopg2.connect(host=host, user=user, password=password, database=db) + cursor = conn.cursor() + except Exception as error: + print(error) + + print("connection succeded") + + urine_output(cursor) + print("view urine_output created") + echo_data(cursor) + print("view echo_data created") + weight_duration(cursor) + print("view weight_duration created") + urine_kidigo(cursor) + print("view urine_kidigo created") + creatinine(cursor) + print("view creatinine created") + + kidigo_stages_creatinine(cursor) + print("view kidigo_stages_creatinine created") + kidigo_7_days_creatinine(cursor) + print("view kidigo_7_days_creatinine created") + query = "select * from kdigo_7_days_creatinine" + df = pd.read_sql_query(query, conn) + + df.to_csv( + os.path.join(output_path, "AKI_KIDIGO_7D_SQL_CREATININE_DBSOURCE.csv"), + encoding="utf-8", + header=True, + ) + + get_labevents(cursor) + + query = "select * from labstay" + df = pd.read_sql_query(query, conn) + df.to_csv( + os.path.join(output_path, "labstay_DBSOURCE.csv"), + encoding="utf-8", + header=True, + ) + + get_vitals_chart(cursor) + + query = "select * from vitalsfirstday" + df = pd.read_sql_query(query, conn) + df.to_csv( + os.path.join(output_path, "chart_vitals_stay_DBSOURCE.csv"), + encoding="utf-8", + header=True, + ) + + get_comorbidities(cursor) + + query = "select * from COMORBIDITIES" + df = pd.read_sql_query(query, conn) + df.to_csv( + os.path.join(output_path, "comorbidities_DBSOURCE.csv"), + encoding="utf-8", + header=True, + ) + + count_icustays(cursor) + + +def main(): + parser = argparse.ArgumentParser() + + parser.add_argument( + "--host", + type=str, + default="localhost", + help="Database Host containing MIMIC III", + ) + parser.add_argument( + "--db", + type=str, + default="mimic", + help="Database name containing MIMIC III data", + ) + parser.add_argument( + "--user", + type=str, + default="postgres", + help="Database user", + ) + parser.add_argument( + "--password", + type=str, + default="postgres", + help="Database user's password", + ) + parser.add_argument( + "--out_path", 
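+        # preprocess() writes the CSV extracts of the materialized views here
+        # (AKI_KIDIGO_7D_SQL_CREATININE_DBSOURCE.csv, labstay_DBSOURCE.csv,
+        # chart_vitals_stay_DBSOURCE.csv, comorbidities_DBSOURCE.csv);
+        # data/mimiciii/preprocess.py later reads the same directory through
+        # its --formatted_path argument.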
+ type=str, + default="./formatted", + help="Output path to save formatted MIMIC III", + ) + + args = parser.parse_args() + + preprocess(args.db, args.host, args.user, args.password, args.path) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/data/mimiciii/preprocess.py b/data/mimiciii/preprocess.py new file mode 100644 index 0000000..14b4f71 --- /dev/null +++ b/data/mimiciii/preprocess.py @@ -0,0 +1,410 @@ +import os + +import argparse + +import numpy as np +import pandas as pd + + +class Reader: + def __init__(self, data_path): + self.data_path = data_path + + def read_admissions_table(self): + df = pd.read_csv( + os.path.join(self.data_path, "ADMISSIONS.csv.gz"), + compression="gzip", + header=0, + index_col=0, + ) + df.columns = map(str.upper, df.columns) + df.ADMITTIME = pd.to_datetime(df.ADMITTIME) + df.DISCHTIME = pd.to_datetime(df.DISCHTIME) + df = df[["SUBJECT_ID", "HADM_ID", "ADMITTIME", "DISCHTIME", "ETHNICITY"]] + return df + + def read_icustay_table(self): + df = pd.read_csv( + os.path.join(self.data_path, "ICUSTAYS.csv.gz"), + compression="gzip", + header=0, + index_col=0, + ) + df.columns = map(str.upper, df.columns) + df.INTIME = pd.to_datetime(df.INTIME) + df.OUTTIME = pd.to_datetime(df.OUTTIME) + df = df[["SUBJECT_ID", "HADM_ID", "ICUSTAY_ID", "INTIME", "OUTTIME", "LOS"]] + return df + + def read_d_icd_diagnoses_table(self): + d_icd_diagnoses = pd.read_csv( + os.path.join(self.data_path, "D_ICD_DIAGNOSES.csv.gz"), + compression="gzip", + header=0, + index_col=0, + ) + d_icd_diagnoses.columns = map(str.upper, d_icd_diagnoses.columns) + return d_icd_diagnoses + + def read_d_items_table(self): + d_items = pd.read_csv( + os.path.join(self.data_path, "D_ITEMS.csv.gz"), + compression="gzip", + header=0, + index_col=0, + ) + d_items.columns = map(str.upper, d_items.columns) + d_items = d_items[["ITEMID", "LABEL", "DBSOURCE", "PARAM_TYPE"]] + return d_items + + def read_d_labitems_table(self): + d_labitems = pd.read_csv( + os.path.join(self.data_path, "D_LABITEMS.csv.gz"), + compression="gzip", + header=0, + index_col=0, + ) + d_labitems.columns = map(str.upper, d_labitems.columns) + d_labitems = d_labitems[["ITEMID", "LABEL", "FLUID", "CATEGORY"]] + return d_labitems + + def read_patients_table(self): + patients = pd.read_csv( + os.path.join(self.data_path, "PATIENTS.csv.gz"), + compression="gzip", + header=0, + index_col=0, + ) + patients.columns = map(str.upper, patients.columns) + patients = patients[["SUBJECT_ID", "GENDER", "DOB"]] + return patients + + def read_diagnoses_icd_table(self): + diagnoses_icd = pd.read_csv( + os.path.join(self.data_path, "DIAGNOSES_ICD.csv.gz"), + compression="gzip", + header=0, + index_col=0, + ) + diagnoses_icd.columns = map(str.upper, diagnoses_icd.columns) + diagnoses_icd = diagnoses_icd[["SUBJECT_ID", "HADM_ID", "ICD9_CODE", "SEQ_NUM"]] + return diagnoses_icd + + +cache = {} + + +def get_info_admissions(reader: Reader, formatted_path: str): + + df = reader.read_admissions_table() + df["STAYTIME"] = ( + df["DISCHTIME"] - df["ADMITTIME"] + ) # stay time : discharge time - admission time + df["STAYTIME"] = df["STAYTIME"] / np.timedelta64(1, "h") + + # formula to calcultate the age of patiens in MIMIC3 + + patients = reader.read_patients_table() + df = pd.merge(df, patients, how="left", on="SUBJECT_ID") + df["DOB"] = pd.to_datetime(df["DOB"]) + df["ADMITTIME"] = pd.to_datetime(df["ADMITTIME"]) + df["AGE"] = df["ADMITTIME"].dt.year - df["DOB"].dt.year + + # Patients who are older than 89 years old at any time in 
the database + # have had their date of birth shifted to obscure their age and comply with HIPAA. + # The date of birth was then set to exactly 300 years before their first admission. + df.loc[((df.AGE > 89) | (df.AGE < 0)), "AGE"] = 90 + + icustays = reader.read_icustay_table() + + # merge on the HADM_ID, unique, represents a single patient's admission to the hospital + # while subject_id can be redundant meaning that a patient had many stays at the hospital + df = pd.merge(df, icustays, how="right", on="HADM_ID") + + # the elapsed time between the admission to the hospital and the tranfer to the ICU + df["Time go ICU"] = (df["INTIME"] - df["ADMITTIME"]) / np.timedelta64(1, "h") + + # the elapsed time in the ICU + df["Time in ICU"] = (df["OUTTIME"] - df["INTIME"]) / np.timedelta64(1, "h") + + # the elapsed time between the admission to the ICU and the final discharge from the hospital + df["Time after go ICU"] = (df["DISCHTIME"] - df["INTIME"]) / np.timedelta64(1, "h") + + # number of times the patient has been transferred to the ICU during one admission + df["Count times go ICU"] = df.groupby("HADM_ID")["ICUSTAY_ID"].transform("count") + + with open(os.path.join(formatted_path, "ADMISSIONS.csv"), "w") as f: + df.to_csv(f, encoding="utf-8", header=True) + + +def check_AKI_before(hadm_id, dataset_path: str): + key = "check_AKI_before" + global cache + + if key not in cache: + diagnoses = pd.read_csv( + os.path.join(dataset_path, "DIAGNOSES_ICD.csv.gz"), compression="gzip" + ) + diagnoses.columns = map(str.upper, diagnoses.columns) + diagnoses = diagnoses.loc[ + diagnoses["ICD9_CODE"].isin(["5845", "5846", "5847", "5848"]) + ] + cache[key] = diagnoses + + diagnoses = cache[key] + + if not diagnoses[diagnoses["HADM_ID"].isin(hadm_id)].empty: + return True + + return False + + +def check_CKD(hadm_id, dataset_path: str): + key = "check_CKD" + global cache + + if key not in cache: + diagnoses = pd.read_csv( + os.path.join(dataset_path, "DIAGNOSES_ICD.csv.gz"), compression="gzip" + ) + diagnoses.columns = map(str.upper, diagnoses.columns) + diagnoses = diagnoses.loc[ + diagnoses["ICD9_CODE"].isin(["5851", "5852", "5853", "5854", "5855"]) + ] + cache[key] = diagnoses + + diagnoses = cache[key] + + if not diagnoses[diagnoses["HADM_ID"].isin(hadm_id)].empty: + return True + + return False + + +def check_renal_failure(hadm_id, formatted_path: str): + key = "check_renal_failure" + global cache + + if key not in cache: + diagnoses = pd.read_csv( + os.path.join(formatted_path, "comorbidities_DBSOURCE.csv") + ) + diagnoses.columns = map(str.upper, diagnoses.columns) + diagnoses = diagnoses.loc[diagnoses["RENAL_FAILURE"] == 1] + cache[key] = diagnoses + + diagnoses = cache[key] + + if not diagnoses[diagnoses["HADM_ID"].isin(hadm_id)].empty: + return True + + return False + + +def caculate_eGFR_MDRD_equation(cr, gender, eth, age): + temp = 186 * (cr ** (-1.154)) * (age ** (-0.203)) + if gender == "F": + temp = temp * 0.742 + if eth == "BLACK/AFRICAN AMERICAN": + temp = temp * 1.21 + return temp + + +def get_aki_patients_7days_creatinine(reader: Reader, formatted_path: str): + dataset_path = reader.data_path + + df = pd.read_csv(os.path.join(formatted_path, "ADMISSIONS.csv")) + df = df.sort_values(by=["SUBJECT_ID_x", "HADM_ID", "ICUSTAY_ID"]) + + print("admissions info", df.shape) + print("number of unique subjects in admission: ", df["SUBJECT_ID_x"].nunique()) + print("number of icustays info in admissions: ", df["ICUSTAY_ID"].nunique()) + + info_save = df.drop_duplicates(subset=["ICUSTAY_ID"]) + 
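+    # Placeholder columns filled per ICU stay in the loop below: EGFR holds
+    # the MDRD estimate computed from the first creatinine (CREAT), gender,
+    # age and ethnicity, while AKI starts at -1 and is set from the KDIGO
+    # 7-day creatinine flag (0 = no AKI, 1 = AKI), then overwritten with
+    # 2 for CKD codes (ICD-9 5851-5855), 3 for AKI present on admission
+    # (ICD-9 5845-5848), and 4 when the comorbidities view flags RENAL_FAILURE.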
info_save["AKI"] = -1 + info_save["EGFR"] = -1 + + print( + "the biggest number of ICU stays for a patient: ", + info_save["Count times go ICU"].max(), + ) + + c_aki_7d = pd.read_csv( + os.path.join(formatted_path, "AKI_KIDIGO_7D_SQL_CREATININE_DBSOURCE.csv") + ) + c_aki_7d.columns = map(str.upper, c_aki_7d.columns) + c_aki_7d = c_aki_7d.drop(columns=["UNNAMED: 0"]) + print("c_aki_7d infos") + print("Total icustays: ", c_aki_7d["ICUSTAY_ID"].nunique()) + print( + "NORMAL Patients in 7DAY: {}".format( + c_aki_7d.loc[c_aki_7d["AKI_STAGE_7DAY"] == 0]["ICUSTAY_ID"].count() + ) + ) + print( + "AKI patients STAGE 1 within 7DAY: {}".format( + c_aki_7d.loc[c_aki_7d["AKI_STAGE_7DAY"] == 1]["ICUSTAY_ID"].count() + ) + ) + print( + "AKI Patients STAGE 2 in 7DAY: {}".format( + c_aki_7d.loc[c_aki_7d["AKI_STAGE_7DAY"] == 2]["ICUSTAY_ID"].count() + ) + ) + print( + "AKI Patients STAGE 3 7DAY: {}".format( + c_aki_7d.loc[c_aki_7d["AKI_STAGE_7DAY"] == 3]["ICUSTAY_ID"].count() + ) + ) + print( + "NAN patients within 7DAY: {}".format(c_aki_7d["AKI_STAGE_7DAY"].isna().sum()) + ) + c_aki_7d = c_aki_7d.dropna(subset=["AKI_STAGE_7DAY"]) + + print("Total icustays: ", c_aki_7d["ICUSTAY_ID"].nunique()) + + df_save = pd.merge(info_save, c_aki_7d, how="inner", on="ICUSTAY_ID") + df_save.columns = map(str.upper, df_save.columns) + icustays_data = [frame for season, frame in df_save.groupby(["ICUSTAY_ID"])] + + count_ckd_normal = 0 + count_ckd_aki = 0 + count_akibefore_normal = 0 + count_akibefore_aki = 0 + count_normal = 0 + count_aki = 0 + count_renalfailure_normal = 0 + count_renalfailure_aki = 0 + + for temp in icustays_data: + + temp = temp.sort_values(by=["ICUSTAY_ID"]) + + first_row = temp.iloc[0] + gender = first_row["GENDER"] + age = first_row["AGE"] + eth = first_row["ETHNICITY"] + cr = first_row["CREAT"] + icustay_id = first_row["ICUSTAY_ID"] + + eGFR = caculate_eGFR_MDRD_equation(cr=cr, gender=gender, age=age, eth=eth) + + df_save.loc[df_save["ICUSTAY_ID"] == icustay_id, "EGFR"] = eGFR + df_save.loc[df_save["ICUSTAY_ID"] == icustay_id, "AKI"] = c_aki_7d.loc[ + c_aki_7d["ICUSTAY_ID"] == icustay_id + ]["AKI_7DAY"].values[0] + + if df_save.loc[df_save["ICUSTAY_ID"] == icustay_id, "AKI"].values[0] == 1: + count_aki = count_aki + 1 + else: + count_normal = count_normal + 1 + + has_aki = ( + info_save.loc[info_save["ICUSTAY_ID"] == icustay_id, "AKI"].values[0] == 1 + ) + + if check_CKD(temp["HADM_ID"], dataset_path) == True: + df_save.loc[df_save["ICUSTAY_ID"] == icustay_id, "AKI"] = 2 + if has_aki: + count_ckd_aki = count_ckd_aki + 1 + else: + count_ckd_normal = count_ckd_normal + 1 + + if check_AKI_before(temp["HADM_ID"], dataset_path) == True: + df_save.loc[df_save["ICUSTAY_ID"] == icustay_id, "AKI"] = 3 + if has_aki: + count_akibefore_aki = count_akibefore_aki + 1 + else: + count_akibefore_normal = count_akibefore_normal + 1 + + if check_renal_failure(temp["HADM_ID"], formatted_path) == True: + df_save.loc[df_save["ICUSTAY_ID"] == icustay_id, "AKI"] = 4 + if has_aki: + count_renalfailure_aki = count_renalfailure_aki + 1 + else: + count_renalfailure_normal = count_renalfailure_normal + 1 + + lab = pd.read_csv(os.path.join(formatted_path, "labstay_DBSOURCE.csv")) + lab.columns = map(str.upper, lab.columns) + info_save = pd.merge(df_save, lab, how="left", on="ICUSTAY_ID") + cols_to_drop = set(info_save.columns).intersection( + set(["UNNAMED: 0_x", "UNNAMED: 0_y", "SUBJECT_ID"]) + ) + if len(cols_to_drop) > 0: + info_save = info_save.drop(columns=list(cols_to_drop)) + info_save = info_save.rename( + 
columns={"SUBJECT_ID_X": "SUBJECT_ID", "HADM_ID_x": "HADM_ID"} + ) + + chart = pd.read_csv(os.path.join(formatted_path, "chart_vitals_stay_DBSOURCE.csv")) + chart.columns = map(str.upper, chart.columns) + df_save = pd.merge(info_save, chart, how="left", on="ICUSTAY_ID") + df_save = df_save.drop( + columns=["UNNAMED: 0", "HADM_ID_y", "HADM_ID_y", "SUBJECT_ID_Y", "SUBJECT_ID_y"] + ) + df_save = df_save.rename( + columns={"SUBJECT_ID_X": "SUBJECT_ID", "HADM_ID_x": "HADM_ID"} + ) + + comorbidities = pd.read_csv( + os.path.join(formatted_path, "comorbidities_DBSOURCE.csv") + ) + comorbidities.columns = map(str.upper, comorbidities.columns) + info_save = pd.merge(df_save, comorbidities, how="left", on="HADM_ID") + info_save = info_save.drop(columns=["UNNAMED: 0"]) + + print( + "NORMAL Patients in 7DAY: {}".format( + c_aki_7d.loc[c_aki_7d["AKI_STAGE_7DAY"] == 0]["ICUSTAY_ID"].count() + ) + ) + print( + "AKI patients STAGE 1 within 7DAY: {}".format( + c_aki_7d.loc[c_aki_7d["AKI_STAGE_7DAY"] == 1]["ICUSTAY_ID"].count() + ) + ) + print("CKD counted as normal: {}".format(count_ckd_normal)) + print("CKD counted as aki: {}".format(count_ckd_aki)) + print("AKI on admission counted as normal: {}".format(count_akibefore_normal)) + print("AKI on admission counted as aki: {}".format(count_akibefore_aki)) + print("RENAL FAILURE counted as normal: {}".format(count_renalfailure_normal)) + print("RENAL FAILURE counted as aki: {}".format(count_renalfailure_aki)) + print("normal: {}".format(count_normal)) + print("aki: {}".format(count_aki)) + + with open( + os.path.join(formatted_path, "INFO_DATASET_7days_creatinine.csv"), "w" + ) as f: + info_save.to_csv(f, encoding="utf-8", header=True) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + "--formatted_path", + type=str, + default="./formatted", + help='Path to formatted MIMIC III data from "parse_db.py"', + ) + parser.add_argument( + "--data_path", + type=str, + default="./data", + help="Path to gzipped MIMIC III data", + ) + + args = parser.parse_args() + + formatted_path = args.formatted_path + data_path = args.data_path + + reader = Reader(data_path=data_path) + + os.makedirs(formatted_path, exist_ok=True) + + get_info_admissions(reader, formatted_path) + get_aki_patients_7days_creatinine(reader, formatted_path) + + +if __name__ == "__main__": + main() diff --git a/data/wine_quality/X_test.npy b/data/wine_quality/X_test.npy new file mode 100644 index 0000000..6907d18 Binary files /dev/null and b/data/wine_quality/X_test.npy differ diff --git a/data/wine_quality/X_train.npy b/data/wine_quality/X_train.npy new file mode 100644 index 0000000..26a6acc Binary files /dev/null and b/data/wine_quality/X_train.npy differ diff --git a/data/wine_quality/download.sh b/data/wine_quality/download.sh new file mode 100755 index 0000000..f9dc733 --- /dev/null +++ b/data/wine_quality/download.sh @@ -0,0 +1,9 @@ +#!/bin/bash +# Download Wine Quality dataset from UCI ML Repository +# Dataset ID: 186 + +echo "Downloading Wine Quality dataset from UCI ML Repository..." +echo "Dataset will be prepared using Python script (preprocess.py)" +echo "" +echo "The dataset will be automatically downloaded via ucimlrepo package." 
+echo "Please run: python preprocess.py" diff --git a/data/wine_quality/feature_metadata.pkl b/data/wine_quality/feature_metadata.pkl new file mode 100644 index 0000000..bd8d7ad Binary files /dev/null and b/data/wine_quality/feature_metadata.pkl differ diff --git a/data/wine_quality/preprocess.py b/data/wine_quality/preprocess.py new file mode 100755 index 0000000..85f8e9c --- /dev/null +++ b/data/wine_quality/preprocess.py @@ -0,0 +1,268 @@ +#!/usr/bin/env python3 +""" +Wine Quality dataset preprocessing script. + +This script: +1. Downloads Wine Quality dataset from UCI ML Repository (ID: 186) +2. Preprocesses features (StandardScaler for numerical, OneHotEncoder for categorical) +3. Creates binary labels (quality >= 7 -> 1, else -> 0) +4. Splits into train/test sets (80/20, stratified) +5. Saves processed data and metadata +""" +import os +import pickle +import random +import argparse +from pathlib import Path + +import numpy as np +import pandas as pd +import torch +from ucimlrepo import fetch_ucirepo +from sklearn.preprocessing import StandardScaler, OneHotEncoder +from sklearn.model_selection import train_test_split + + +def set_seeds(seed: int = 42): + """Set random seeds for reproducibility.""" + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + + +def download_dataset(dataset_id: int = 186): + """ + Download Wine Quality dataset from UCI ML Repository. + + Args: + dataset_id: UCI dataset ID (186 for Wine Quality) + + Returns: + Dataset object from ucimlrepo + """ + print(f"Downloading dataset (ID: {dataset_id}) from UCI ML Repository...") + dataset = fetch_ucirepo(id=dataset_id) + print(f"Dataset '{dataset.metadata['name']}' downloaded successfully!") + print(f" Samples: {len(dataset.data.features)}") + print(f" Features: {len(dataset.data.features.columns)}") + return dataset + + +def preprocess_features(dataset): + """ + Preprocess features using StandardScaler and OneHotEncoder. 
+ + Args: + dataset: UCI dataset object + + Returns: + feature_metadata: Dictionary with encoder and metadata for each feature + input_array: Preprocessed feature array + """ + print("\nPreprocessing features...") + + feature_metadata = {} + input_data = [] + start_idx = 0 + + for col in dataset.data.features.columns: + feature_metadata[col] = {} + + if dataset.data.features[col].dtype == "object": + # Categorical feature + feature_metadata[col]['type'] = "categorical" + onehot = OneHotEncoder(handle_unknown='ignore') + feature_val = dataset.data.features[col].fillna("missing") + preprocessed = onehot.fit_transform(feature_val.values.reshape(-1, 1)).toarray() + + # Calculate categorical distribution + cat_dist = feature_val.value_counts(dropna=False) / len(dataset.data.features) + cat_dist = cat_dist.loc[onehot.categories_[0]].values + + feature_metadata[col]['encoder'] = onehot + feature_metadata[col]['cat_dist'] = cat_dist + feature_metadata[col]['index'] = np.arange(start_idx, start_idx + preprocessed.shape[1]) + start_idx += preprocessed.shape[1] + + else: + # Numerical feature + feature_metadata[col]['type'] = "numerical" + scaler = StandardScaler() + preprocessed = scaler.fit_transform( + dataset.data.features[col].values.reshape(-1, 1) + ) + + feature_metadata[col]['encoder'] = scaler + feature_metadata[col]['index'] = start_idx + start_idx += 1 + + input_data.append(preprocessed) + + input_array = np.concatenate(input_data, axis=1) + + print(f" Processed features: {input_array.shape[1]}") + print(f" Feature metadata created for {len(feature_metadata)} features") + + return feature_metadata, input_array + + +def create_labels(dataset): + """ + Create binary labels from quality scores. + + Quality scores 7, 8, 9 -> 1 (high quality) + Other scores -> 0 (normal quality) + + Args: + dataset: UCI dataset object + + Returns: + y: Binary labels array + """ + print("\nCreating binary labels...") + + # Check target distribution + print(f" Quality score distribution:") + print(dataset.data.targets.value_counts().sort_index()) + + # Create binary labels + y = dataset.data.targets.isin([7, 8, 9]).values.astype(int)[:, 0] + + print(f" Binary label distribution:") + unique, counts = np.unique(y, return_counts=True) + for label, count in zip(unique, counts): + print(f" Class {label}: {count} ({count/len(y)*100:.1f}%)") + + return y + + +def split_data(X, y, test_size=0.2, random_state=42): + """ + Split data into train and test sets. + + Args: + X: Feature array + y: Label array + test_size: Test set size (default: 0.2) + random_state: Random seed (default: 42) + + Returns: + X_train, X_test, y_train, y_test + """ + print(f"\nSplitting data (test_size={test_size}, random_state={random_state})...") + + X_train, X_test, y_train, y_test = train_test_split( + X, y, test_size=test_size, random_state=random_state, stratify=y + ) + + print(f" Train set: {len(X_train)} samples") + print(f" Test set: {len(X_test)} samples") + + return X_train, X_test, y_train, y_test + + +def save_data(dataset, X_train, X_test, y_train, y_test, feature_metadata, output_dir): + """ + Save processed data and metadata. 
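+
+    Files written to output_dir: X_train.npy, X_test.npy, y_train.npy and
+    y_test.npy (train/test splits), feature_metadata.pkl (pickled encoder
+    metadata) and raw_data.csv (the unprocessed feature table, for reference).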
+ + Args: + dataset: UCI dataset object + X_train, X_test, y_train, y_test: Train/test data + feature_metadata: Feature metadata dictionary + output_dir: Output directory path + """ + output_path = Path(output_dir) + output_path.mkdir(parents=True, exist_ok=True) + + print(f"\nSaving processed data to {output_path}...") + + # Save train/test splits + np.save(output_path / "X_train.npy", X_train) + np.save(output_path / "X_test.npy", X_test) + np.save(output_path / "y_train.npy", y_train) + np.save(output_path / "y_test.npy", y_test) + print(" ✓ Saved train/test splits (.npy)") + + # Save feature metadata + with open(output_path / "feature_metadata.pkl", "wb") as f: + pickle.dump(feature_metadata, f) + print(" ✓ Saved feature metadata (.pkl)") + + # Save raw data for reference + raw_data = dataset.data.features.copy() + raw_data.to_csv(output_path / "raw_data.csv", index=False) + print(" ✓ Saved raw data (.csv)") + + print("\nData preprocessing completed successfully!") + print(f"\nSummary:") + print(f" Total samples: {len(X_train) + len(X_test)}") + print(f" Training samples: {len(X_train)}") + print(f" Test samples: {len(X_test)}") + print(f" Features: {X_train.shape[1]}") + print(f" Output directory: {output_path.absolute()}") + + +def main(): + parser = argparse.ArgumentParser( + description="Download and preprocess Wine Quality dataset" + ) + parser.add_argument( + "--dataset-id", + type=int, + default=186, + help="UCI dataset ID (default: 186 for Wine Quality)" + ) + parser.add_argument( + "--output-dir", + type=str, + default=None, + help="Output directory (default: same directory as this script)" + ) + parser.add_argument( + "--test-size", + type=float, + default=0.2, + help="Test set size (default: 0.2)" + ) + parser.add_argument( + "--seed", + type=int, + default=42, + help="Random seed (default: 42)" + ) + + args = parser.parse_args() + + # Set output directory to script directory if not specified + if args.output_dir is None: + args.output_dir = Path(__file__).parent.absolute() + + # Set random seeds + set_seeds(args.seed) + + # Download dataset + dataset = download_dataset(args.dataset_id) + + # Preprocess features + feature_metadata, input_array = preprocess_features(dataset) + + # Create labels + y = create_labels(dataset) + + # Split data + X_train, X_test, y_train, y_test = split_data( + input_array, y, test_size=args.test_size, random_state=args.seed + ) + + # Save data + save_data( + dataset, X_train, X_test, y_train, y_test, + feature_metadata, args.output_dir + ) + + +if __name__ == "__main__": + main() diff --git a/data/wine_quality/raw_data.csv b/data/wine_quality/raw_data.csv new file mode 100644 index 0000000..ca2785b --- /dev/null +++ b/data/wine_quality/raw_data.csv @@ -0,0 +1,6498 @@ +fixed_acidity,volatile_acidity,citric_acid,residual_sugar,chlorides,free_sulfur_dioxide,total_sulfur_dioxide,density,pH,sulphates,alcohol +7.4,0.7,0.0,1.9,0.076,11.0,34.0,0.9978,3.51,0.56,9.4 +7.8,0.88,0.0,2.6,0.098,25.0,67.0,0.9968,3.2,0.68,9.8 +7.8,0.76,0.04,2.3,0.092,15.0,54.0,0.997,3.26,0.65,9.8 +11.2,0.28,0.56,1.9,0.075,17.0,60.0,0.998,3.16,0.58,9.8 +7.4,0.7,0.0,1.9,0.076,11.0,34.0,0.9978,3.51,0.56,9.4 +7.4,0.66,0.0,1.8,0.075,13.0,40.0,0.9978,3.51,0.56,9.4 +7.9,0.6,0.06,1.6,0.069,15.0,59.0,0.9964,3.3,0.46,9.4 +7.3,0.65,0.0,1.2,0.065,15.0,21.0,0.9946,3.39,0.47,10.0 +7.8,0.58,0.02,2.0,0.073,9.0,18.0,0.9968,3.36,0.57,9.5 +7.5,0.5,0.36,6.1,0.071,17.0,102.0,0.9978,3.35,0.8,10.5 +6.7,0.58,0.08,1.8,0.097,15.0,65.0,0.9959,3.28,0.54,9.2 
+7.5,0.5,0.36,6.1,0.071,17.0,102.0,0.9978,3.35,0.8,10.5 +5.6,0.615,0.0,1.6,0.089,16.0,59.0,0.9943,3.58,0.52,9.9 +7.8,0.61,0.29,1.6,0.114,9.0,29.0,0.9974,3.26,1.56,9.1 +8.9,0.62,0.18,3.8,0.176,52.0,145.0,0.9986,3.16,0.88,9.2 +8.9,0.62,0.19,3.9,0.17,51.0,148.0,0.9986,3.17,0.93,9.2 +8.5,0.28,0.56,1.8,0.092,35.0,103.0,0.9969,3.3,0.75,10.5 +8.1,0.56,0.28,1.7,0.368,16.0,56.0,0.9968,3.11,1.28,9.3 +7.4,0.59,0.08,4.4,0.086,6.0,29.0,0.9974,3.38,0.5,9.0 +7.9,0.32,0.51,1.8,0.341,17.0,56.0,0.9969,3.04,1.08,9.2 +8.9,0.22,0.48,1.8,0.077,29.0,60.0,0.9968,3.39,0.53,9.4 +7.6,0.39,0.31,2.3,0.082,23.0,71.0,0.9982,3.52,0.65,9.7 +7.9,0.43,0.21,1.6,0.106,10.0,37.0,0.9966,3.17,0.91,9.5 +8.5,0.49,0.11,2.3,0.084,9.0,67.0,0.9968,3.17,0.53,9.4 +6.9,0.4,0.14,2.4,0.085,21.0,40.0,0.9968,3.43,0.63,9.7 +6.3,0.39,0.16,1.4,0.08,11.0,23.0,0.9955,3.34,0.56,9.3 +7.6,0.41,0.24,1.8,0.08,4.0,11.0,0.9962,3.28,0.59,9.5 +7.9,0.43,0.21,1.6,0.106,10.0,37.0,0.9966,3.17,0.91,9.5 +7.1,0.71,0.0,1.9,0.08,14.0,35.0,0.9972,3.47,0.55,9.4 +7.8,0.645,0.0,2.0,0.082,8.0,16.0,0.9964,3.38,0.59,9.8 +6.7,0.675,0.07,2.4,0.089,17.0,82.0,0.9958,3.35,0.54,10.1 +6.9,0.685,0.0,2.5,0.105,22.0,37.0,0.9966,3.46,0.57,10.6 +8.3,0.655,0.12,2.3,0.083,15.0,113.0,0.9966,3.17,0.66,9.8 +6.9,0.605,0.12,10.7,0.073,40.0,83.0,0.9993,3.45,0.52,9.4 +5.2,0.32,0.25,1.8,0.103,13.0,50.0,0.9957,3.38,0.55,9.2 +7.8,0.645,0.0,5.5,0.086,5.0,18.0,0.9986,3.4,0.55,9.6 +7.8,0.6,0.14,2.4,0.086,3.0,15.0,0.9975,3.42,0.6,10.8 +8.1,0.38,0.28,2.1,0.066,13.0,30.0,0.9968,3.23,0.73,9.7 +5.7,1.13,0.09,1.5,0.172,7.0,19.0,0.994,3.5,0.48,9.8 +7.3,0.45,0.36,5.9,0.074,12.0,87.0,0.9978,3.33,0.83,10.5 +7.3,0.45,0.36,5.9,0.074,12.0,87.0,0.9978,3.33,0.83,10.5 +8.8,0.61,0.3,2.8,0.088,17.0,46.0,0.9976,3.26,0.51,9.3 +7.5,0.49,0.2,2.6,0.332,8.0,14.0,0.9968,3.21,0.9,10.5 +8.1,0.66,0.22,2.2,0.069,9.0,23.0,0.9968,3.3,1.2,10.3 +6.8,0.67,0.02,1.8,0.05,5.0,11.0,0.9962,3.48,0.52,9.5 +4.6,0.52,0.15,2.1,0.054,8.0,65.0,0.9934,3.9,0.56,13.1 +7.7,0.935,0.43,2.2,0.114,22.0,114.0,0.997,3.25,0.73,9.2 +8.7,0.29,0.52,1.6,0.113,12.0,37.0,0.9969,3.25,0.58,9.5 +6.4,0.4,0.23,1.6,0.066,5.0,12.0,0.9958,3.34,0.56,9.2 +5.6,0.31,0.37,1.4,0.074,12.0,96.0,0.9954,3.32,0.58,9.2 +8.8,0.66,0.26,1.7,0.074,4.0,23.0,0.9971,3.15,0.74,9.2 +6.6,0.52,0.04,2.2,0.069,8.0,15.0,0.9956,3.4,0.63,9.4 +6.6,0.5,0.04,2.1,0.068,6.0,14.0,0.9955,3.39,0.64,9.4 +8.6,0.38,0.36,3.0,0.081,30.0,119.0,0.997,3.2,0.56,9.4 +7.6,0.51,0.15,2.8,0.11,33.0,73.0,0.9955,3.17,0.63,10.2 +7.7,0.62,0.04,3.8,0.084,25.0,45.0,0.9978,3.34,0.53,9.5 +10.2,0.42,0.57,3.4,0.07,4.0,10.0,0.9971,3.04,0.63,9.6 +7.5,0.63,0.12,5.1,0.111,50.0,110.0,0.9983,3.26,0.77,9.4 +7.8,0.59,0.18,2.3,0.076,17.0,54.0,0.9975,3.43,0.59,10.0 +7.3,0.39,0.31,2.4,0.074,9.0,46.0,0.9962,3.41,0.54,9.4 +8.8,0.4,0.4,2.2,0.079,19.0,52.0,0.998,3.44,0.64,9.2 +7.7,0.69,0.49,1.8,0.115,20.0,112.0,0.9968,3.21,0.71,9.3 +7.5,0.52,0.16,1.9,0.085,12.0,35.0,0.9968,3.38,0.62,9.5 +7.0,0.735,0.05,2.0,0.081,13.0,54.0,0.9966,3.39,0.57,9.8 +7.2,0.725,0.05,4.65,0.086,4.0,11.0,0.9962,3.41,0.39,10.9 +7.2,0.725,0.05,4.65,0.086,4.0,11.0,0.9962,3.41,0.39,10.9 +7.5,0.52,0.11,1.5,0.079,11.0,39.0,0.9968,3.42,0.58,9.6 +6.6,0.705,0.07,1.6,0.076,6.0,15.0,0.9962,3.44,0.58,10.7 +9.3,0.32,0.57,2.0,0.074,27.0,65.0,0.9969,3.28,0.79,10.7 +8.0,0.705,0.05,1.9,0.074,8.0,19.0,0.9962,3.34,0.95,10.5 +7.7,0.63,0.08,1.9,0.076,15.0,27.0,0.9967,3.32,0.54,9.5 +7.7,0.67,0.23,2.1,0.088,17.0,96.0,0.9962,3.32,0.48,9.5 +7.7,0.69,0.22,1.9,0.084,18.0,94.0,0.9961,3.31,0.48,9.5 +8.3,0.675,0.26,2.1,0.084,11.0,43.0,0.9976,3.31,0.53,9.2 
+9.7,0.32,0.54,2.5,0.094,28.0,83.0,0.9984,3.28,0.82,9.6 +8.8,0.41,0.64,2.2,0.093,9.0,42.0,0.9986,3.54,0.66,10.5 +8.8,0.41,0.64,2.2,0.093,9.0,42.0,0.9986,3.54,0.66,10.5 +6.8,0.785,0.0,2.4,0.104,14.0,30.0,0.9966,3.52,0.55,10.7 +6.7,0.75,0.12,2.0,0.086,12.0,80.0,0.9958,3.38,0.52,10.1 +8.3,0.625,0.2,1.5,0.08,27.0,119.0,0.9972,3.16,1.12,9.1 +6.2,0.45,0.2,1.6,0.069,3.0,15.0,0.9958,3.41,0.56,9.2 +7.8,0.43,0.7,1.9,0.464,22.0,67.0,0.9974,3.13,1.28,9.4 +7.4,0.5,0.47,2.0,0.086,21.0,73.0,0.997,3.36,0.57,9.1 +7.3,0.67,0.26,1.8,0.401,16.0,51.0,0.9969,3.16,1.14,9.4 +6.3,0.3,0.48,1.8,0.069,18.0,61.0,0.9959,3.44,0.78,10.3 +6.9,0.55,0.15,2.2,0.076,19.0,40.0,0.9961,3.41,0.59,10.1 +8.6,0.49,0.28,1.9,0.11,20.0,136.0,0.9972,2.93,1.95,9.9 +7.7,0.49,0.26,1.9,0.062,9.0,31.0,0.9966,3.39,0.64,9.6 +9.3,0.39,0.44,2.1,0.107,34.0,125.0,0.9978,3.14,1.22,9.5 +7.0,0.62,0.08,1.8,0.076,8.0,24.0,0.9978,3.48,0.53,9.0 +7.9,0.52,0.26,1.9,0.079,42.0,140.0,0.9964,3.23,0.54,9.5 +8.6,0.49,0.28,1.9,0.11,20.0,136.0,0.9972,2.93,1.95,9.9 +8.6,0.49,0.29,2.0,0.11,19.0,133.0,0.9972,2.93,1.98,9.8 +7.7,0.49,0.26,1.9,0.062,9.0,31.0,0.9966,3.39,0.64,9.6 +5.0,1.02,0.04,1.4,0.045,41.0,85.0,0.9938,3.75,0.48,10.5 +4.7,0.6,0.17,2.3,0.058,17.0,106.0,0.9932,3.85,0.6,12.9 +6.8,0.775,0.0,3.0,0.102,8.0,23.0,0.9965,3.45,0.56,10.7 +7.0,0.5,0.25,2.0,0.07,3.0,22.0,0.9963,3.25,0.63,9.2 +7.6,0.9,0.06,2.5,0.079,5.0,10.0,0.9967,3.39,0.56,9.8 +8.1,0.545,0.18,1.9,0.08,13.0,35.0,0.9972,3.3,0.59,9.0 +8.3,0.61,0.3,2.1,0.084,11.0,50.0,0.9972,3.4,0.61,10.2 +7.8,0.5,0.3,1.9,0.075,8.0,22.0,0.9959,3.31,0.56,10.4 +8.1,0.545,0.18,1.9,0.08,13.0,35.0,0.9972,3.3,0.59,9.0 +8.1,0.575,0.22,2.1,0.077,12.0,65.0,0.9967,3.29,0.51,9.2 +7.2,0.49,0.24,2.2,0.07,5.0,36.0,0.996,3.33,0.48,9.4 +8.1,0.575,0.22,2.1,0.077,12.0,65.0,0.9967,3.29,0.51,9.2 +7.8,0.41,0.68,1.7,0.467,18.0,69.0,0.9973,3.08,1.31,9.3 +6.2,0.63,0.31,1.7,0.088,15.0,64.0,0.9969,3.46,0.79,9.3 +8.0,0.33,0.53,2.5,0.091,18.0,80.0,0.9976,3.37,0.8,9.6 +8.1,0.785,0.52,2.0,0.122,37.0,153.0,0.9969,3.21,0.69,9.3 +7.8,0.56,0.19,1.8,0.104,12.0,47.0,0.9964,3.19,0.93,9.5 +8.4,0.62,0.09,2.2,0.084,11.0,108.0,0.9964,3.15,0.66,9.8 +8.4,0.6,0.1,2.2,0.085,14.0,111.0,0.9964,3.15,0.66,9.8 +10.1,0.31,0.44,2.3,0.08,22.0,46.0,0.9988,3.32,0.67,9.7 +7.8,0.56,0.19,1.8,0.104,12.0,47.0,0.9964,3.19,0.93,9.5 +9.4,0.4,0.31,2.2,0.09,13.0,62.0,0.9966,3.07,0.63,10.5 +8.3,0.54,0.28,1.9,0.077,11.0,40.0,0.9978,3.39,0.61,10.0 +7.8,0.56,0.12,2.0,0.082,7.0,28.0,0.997,3.37,0.5,9.4 +8.8,0.55,0.04,2.2,0.119,14.0,56.0,0.9962,3.21,0.6,10.9 +7.0,0.69,0.08,1.8,0.097,22.0,89.0,0.9959,3.34,0.54,9.2 +7.3,1.07,0.09,1.7,0.178,10.0,89.0,0.9962,3.3,0.57,9.0 +8.8,0.55,0.04,2.2,0.119,14.0,56.0,0.9962,3.21,0.6,10.9 +7.3,0.695,0.0,2.5,0.075,3.0,13.0,0.998,3.49,0.52,9.2 +8.0,0.71,0.0,2.6,0.08,11.0,34.0,0.9976,3.44,0.53,9.5 +7.8,0.5,0.17,1.6,0.082,21.0,102.0,0.996,3.39,0.48,9.5 +9.0,0.62,0.04,1.9,0.146,27.0,90.0,0.9984,3.16,0.7,9.4 +8.2,1.33,0.0,1.7,0.081,3.0,12.0,0.9964,3.53,0.49,10.9 +8.1,1.33,0.0,1.8,0.082,3.0,12.0,0.9964,3.54,0.48,10.9 +8.0,0.59,0.16,1.8,0.065,3.0,16.0,0.9962,3.42,0.92,10.5 +6.1,0.38,0.15,1.8,0.072,6.0,19.0,0.9955,3.42,0.57,9.4 +8.0,0.745,0.56,2.0,0.118,30.0,134.0,0.9968,3.24,0.66,9.4 +5.6,0.5,0.09,2.3,0.049,17.0,99.0,0.9937,3.63,0.63,13.0 +5.6,0.5,0.09,2.3,0.049,17.0,99.0,0.9937,3.63,0.63,13.0 +6.6,0.5,0.01,1.5,0.06,17.0,26.0,0.9952,3.4,0.58,9.8 +7.9,1.04,0.05,2.2,0.084,13.0,29.0,0.9959,3.22,0.55,9.9 +8.4,0.745,0.11,1.9,0.09,16.0,63.0,0.9965,3.19,0.82,9.6 +8.3,0.715,0.15,1.8,0.089,10.0,52.0,0.9968,3.23,0.77,9.5 
+7.2,0.415,0.36,2.0,0.081,13.0,45.0,0.9972,3.48,0.64,9.2 +7.8,0.56,0.19,2.1,0.081,15.0,105.0,0.9962,3.33,0.54,9.5 +7.8,0.56,0.19,2.0,0.081,17.0,108.0,0.9962,3.32,0.54,9.5 +8.4,0.745,0.11,1.9,0.09,16.0,63.0,0.9965,3.19,0.82,9.6 +8.3,0.715,0.15,1.8,0.089,10.0,52.0,0.9968,3.23,0.77,9.5 +5.2,0.34,0.0,1.8,0.05,27.0,63.0,0.9916,3.68,0.79,14.0 +6.3,0.39,0.08,1.7,0.066,3.0,20.0,0.9954,3.34,0.58,9.4 +5.2,0.34,0.0,1.8,0.05,27.0,63.0,0.9916,3.68,0.79,14.0 +8.1,0.67,0.55,1.8,0.117,32.0,141.0,0.9968,3.17,0.62,9.4 +5.8,0.68,0.02,1.8,0.087,21.0,94.0,0.9944,3.54,0.52,10.0 +7.6,0.49,0.26,1.6,0.236,10.0,88.0,0.9968,3.11,0.8,9.3 +6.9,0.49,0.1,2.3,0.074,12.0,30.0,0.9959,3.42,0.58,10.2 +8.2,0.4,0.44,2.8,0.089,11.0,43.0,0.9975,3.53,0.61,10.5 +7.3,0.33,0.47,2.1,0.077,5.0,11.0,0.9958,3.33,0.53,10.3 +9.2,0.52,1.0,3.4,0.61,32.0,69.0,0.9996,2.74,2.0,9.4 +7.5,0.6,0.03,1.8,0.095,25.0,99.0,0.995,3.35,0.54,10.1 +7.5,0.6,0.03,1.8,0.095,25.0,99.0,0.995,3.35,0.54,10.1 +7.1,0.43,0.42,5.5,0.07,29.0,129.0,0.9973,3.42,0.72,10.5 +7.1,0.43,0.42,5.5,0.071,28.0,128.0,0.9973,3.42,0.71,10.5 +7.1,0.43,0.42,5.5,0.07,29.0,129.0,0.9973,3.42,0.72,10.5 +7.1,0.43,0.42,5.5,0.071,28.0,128.0,0.9973,3.42,0.71,10.5 +7.1,0.68,0.0,2.2,0.073,12.0,22.0,0.9969,3.48,0.5,9.3 +6.8,0.6,0.18,1.9,0.079,18.0,86.0,0.9968,3.59,0.57,9.3 +7.6,0.95,0.03,2.0,0.09,7.0,20.0,0.9959,3.2,0.56,9.6 +7.6,0.68,0.02,1.3,0.072,9.0,20.0,0.9965,3.17,1.08,9.2 +7.8,0.53,0.04,1.7,0.076,17.0,31.0,0.9964,3.33,0.56,10.0 +7.4,0.6,0.26,7.3,0.07,36.0,121.0,0.9982,3.37,0.49,9.4 +7.3,0.59,0.26,7.2,0.07,35.0,121.0,0.9981,3.37,0.49,9.4 +7.8,0.63,0.48,1.7,0.1,14.0,96.0,0.9961,3.19,0.62,9.5 +6.8,0.64,0.1,2.1,0.085,18.0,101.0,0.9956,3.34,0.52,10.2 +7.3,0.55,0.03,1.6,0.072,17.0,42.0,0.9956,3.37,0.48,9.0 +6.8,0.63,0.07,2.1,0.089,11.0,44.0,0.9953,3.47,0.55,10.4 +7.5,0.705,0.24,1.8,0.36,15.0,63.0,0.9964,3.0,1.59,9.5 +7.9,0.885,0.03,1.8,0.058,4.0,8.0,0.9972,3.36,0.33,9.1 +8.0,0.42,0.17,2.0,0.073,6.0,18.0,0.9972,3.29,0.61,9.2 +8.0,0.42,0.17,2.0,0.073,6.0,18.0,0.9972,3.29,0.61,9.2 +7.4,0.62,0.05,1.9,0.068,24.0,42.0,0.9961,3.42,0.57,11.5 +7.3,0.38,0.21,2.0,0.08,7.0,35.0,0.9961,3.33,0.47,9.5 +6.9,0.5,0.04,1.5,0.085,19.0,49.0,0.9958,3.35,0.78,9.5 +7.3,0.38,0.21,2.0,0.08,7.0,35.0,0.9961,3.33,0.47,9.5 +7.5,0.52,0.42,2.3,0.087,8.0,38.0,0.9972,3.58,0.61,10.5 +7.0,0.805,0.0,2.5,0.068,7.0,20.0,0.9969,3.48,0.56,9.6 +8.8,0.61,0.14,2.4,0.067,10.0,42.0,0.9969,3.19,0.59,9.5 +8.8,0.61,0.14,2.4,0.067,10.0,42.0,0.9969,3.19,0.59,9.5 +8.9,0.61,0.49,2.0,0.27,23.0,110.0,0.9972,3.12,1.02,9.3 +7.2,0.73,0.02,2.5,0.076,16.0,42.0,0.9972,3.44,0.52,9.3 +6.8,0.61,0.2,1.8,0.077,11.0,65.0,0.9971,3.54,0.58,9.3 +6.7,0.62,0.21,1.9,0.079,8.0,62.0,0.997,3.52,0.58,9.3 +8.9,0.31,0.57,2.0,0.111,26.0,85.0,0.9971,3.26,0.53,9.7 +7.4,0.39,0.48,2.0,0.082,14.0,67.0,0.9972,3.34,0.55,9.2 +7.7,0.705,0.1,2.6,0.084,9.0,26.0,0.9976,3.39,0.49,9.7 +7.9,0.5,0.33,2.0,0.084,15.0,143.0,0.9968,3.2,0.55,9.5 +7.9,0.49,0.32,1.9,0.082,17.0,144.0,0.9968,3.2,0.55,9.5 +8.2,0.5,0.35,2.9,0.077,21.0,127.0,0.9976,3.23,0.62,9.4 +6.4,0.37,0.25,1.9,0.074,21.0,49.0,0.9974,3.57,0.62,9.8 +6.8,0.63,0.12,3.8,0.099,16.0,126.0,0.9969,3.28,0.61,9.5 +7.6,0.55,0.21,2.2,0.071,7.0,28.0,0.9964,3.28,0.55,9.7 +7.6,0.55,0.21,2.2,0.071,7.0,28.0,0.9964,3.28,0.55,9.7 +7.8,0.59,0.33,2.0,0.074,24.0,120.0,0.9968,3.25,0.54,9.4 +7.3,0.58,0.3,2.4,0.074,15.0,55.0,0.9968,3.46,0.59,10.2 +11.5,0.3,0.6,2.0,0.067,12.0,27.0,0.9981,3.11,0.97,10.1 +5.4,0.835,0.08,1.2,0.046,13.0,93.0,0.9924,3.57,0.85,13.0 +6.9,1.09,0.06,2.1,0.061,12.0,31.0,0.9948,3.51,0.43,11.4 
+9.6,0.32,0.47,1.4,0.056,9.0,24.0,0.99695,3.22,0.82,10.3 +8.8,0.37,0.48,2.1,0.097,39.0,145.0,0.9975,3.04,1.03,9.3 +6.8,0.5,0.11,1.5,0.075,16.0,49.0,0.99545,3.36,0.79,9.5 +7.0,0.42,0.35,1.6,0.088,16.0,39.0,0.9961,3.34,0.55,9.2 +7.0,0.43,0.36,1.6,0.089,14.0,37.0,0.99615,3.34,0.56,9.2 +12.8,0.3,0.74,2.6,0.095,9.0,28.0,0.9994,3.2,0.77,10.8 +12.8,0.3,0.74,2.6,0.095,9.0,28.0,0.9994,3.2,0.77,10.8 +7.8,0.57,0.31,1.8,0.069,26.0,120.0,0.99625,3.29,0.53,9.3 +7.8,0.44,0.28,2.7,0.1,18.0,95.0,0.9966,3.22,0.67,9.4 +11.0,0.3,0.58,2.1,0.054,7.0,19.0,0.998,3.31,0.88,10.5 +9.7,0.53,0.6,2.0,0.039,5.0,19.0,0.99585,3.3,0.86,12.4 +8.0,0.725,0.24,2.8,0.083,10.0,62.0,0.99685,3.35,0.56,10.0 +11.6,0.44,0.64,2.1,0.059,5.0,15.0,0.998,3.21,0.67,10.2 +8.2,0.57,0.26,2.2,0.06,28.0,65.0,0.9959,3.3,0.43,10.1 +7.8,0.735,0.08,2.4,0.092,10.0,41.0,0.9974,3.24,0.71,9.8 +7.0,0.49,0.49,5.6,0.06,26.0,121.0,0.9974,3.34,0.76,10.5 +8.7,0.625,0.16,2.0,0.101,13.0,49.0,0.9962,3.14,0.57,11.0 +8.1,0.725,0.22,2.2,0.072,11.0,41.0,0.9967,3.36,0.55,9.1 +7.5,0.49,0.19,1.9,0.076,10.0,44.0,0.9957,3.39,0.54,9.7 +7.8,0.53,0.33,2.4,0.08,24.0,144.0,0.99655,3.3,0.6,9.5 +7.8,0.34,0.37,2.0,0.082,24.0,58.0,0.9964,3.34,0.59,9.4 +7.4,0.53,0.26,2.0,0.101,16.0,72.0,0.9957,3.15,0.57,9.4 +6.8,0.61,0.04,1.5,0.057,5.0,10.0,0.99525,3.42,0.6,9.5 +8.6,0.645,0.25,2.0,0.083,8.0,28.0,0.99815,3.28,0.6,10.0 +8.4,0.635,0.36,2.0,0.089,15.0,55.0,0.99745,3.31,0.57,10.4 +7.7,0.43,0.25,2.6,0.073,29.0,63.0,0.99615,3.37,0.58,10.5 +8.9,0.59,0.5,2.0,0.337,27.0,81.0,0.9964,3.04,1.61,9.5 +9.0,0.82,0.14,2.6,0.089,9.0,23.0,0.9984,3.39,0.63,9.8 +7.7,0.43,0.25,2.6,0.073,29.0,63.0,0.99615,3.37,0.58,10.5 +6.9,0.52,0.25,2.6,0.081,10.0,37.0,0.99685,3.46,0.5,11.0 +5.2,0.48,0.04,1.6,0.054,19.0,106.0,0.9927,3.54,0.62,12.2 +8.0,0.38,0.06,1.8,0.078,12.0,49.0,0.99625,3.37,0.52,9.9 +8.5,0.37,0.2,2.8,0.09,18.0,58.0,0.998,3.34,0.7,9.6 +6.9,0.52,0.25,2.6,0.081,10.0,37.0,0.99685,3.46,0.5,11.0 +8.2,1.0,0.09,2.3,0.065,7.0,37.0,0.99685,3.32,0.55,9.0 +7.2,0.63,0.0,1.9,0.097,14.0,38.0,0.99675,3.37,0.58,9.0 +7.2,0.63,0.0,1.9,0.097,14.0,38.0,0.99675,3.37,0.58,9.0 +7.2,0.645,0.0,1.9,0.097,15.0,39.0,0.99675,3.37,0.58,9.2 +7.2,0.63,0.0,1.9,0.097,14.0,38.0,0.99675,3.37,0.58,9.0 +8.2,1.0,0.09,2.3,0.065,7.0,37.0,0.99685,3.32,0.55,9.0 +8.9,0.635,0.37,1.7,0.263,5.0,62.0,0.9971,3.0,1.09,9.3 +12.0,0.38,0.56,2.1,0.093,6.0,24.0,0.99925,3.14,0.71,10.9 +7.7,0.58,0.1,1.8,0.102,28.0,109.0,0.99565,3.08,0.49,9.8 +15.0,0.21,0.44,2.2,0.075,10.0,24.0,1.00005,3.07,0.84,9.2 +15.0,0.21,0.44,2.2,0.075,10.0,24.0,1.00005,3.07,0.84,9.2 +7.3,0.66,0.0,2.0,0.084,6.0,23.0,0.9983,3.61,0.96,9.9 +7.1,0.68,0.07,1.9,0.075,16.0,51.0,0.99685,3.38,0.52,9.5 +8.2,0.6,0.17,2.3,0.072,11.0,73.0,0.9963,3.2,0.45,9.3 +7.7,0.53,0.06,1.7,0.074,9.0,39.0,0.99615,3.35,0.48,9.8 +7.3,0.66,0.0,2.0,0.084,6.0,23.0,0.9983,3.61,0.96,9.9 +10.8,0.32,0.44,1.6,0.063,16.0,37.0,0.9985,3.22,0.78,10.0 +7.1,0.6,0.0,1.8,0.074,16.0,34.0,0.9972,3.47,0.7,9.9 +11.1,0.35,0.48,3.1,0.09,5.0,21.0,0.9986,3.17,0.53,10.5 +7.7,0.775,0.42,1.9,0.092,8.0,86.0,0.9959,3.23,0.59,9.5 +7.1,0.6,0.0,1.8,0.074,16.0,34.0,0.9972,3.47,0.7,9.9 +8.0,0.57,0.23,3.2,0.073,17.0,119.0,0.99675,3.26,0.57,9.3 +9.4,0.34,0.37,2.2,0.075,5.0,13.0,0.998,3.22,0.62,9.2 +6.6,0.695,0.0,2.1,0.075,12.0,56.0,0.9968,3.49,0.67,9.2 +7.7,0.41,0.76,1.8,0.611,8.0,45.0,0.9968,3.06,1.26,9.4 +10.0,0.31,0.47,2.6,0.085,14.0,33.0,0.99965,3.36,0.8,10.5 +7.9,0.33,0.23,1.7,0.077,18.0,45.0,0.99625,3.29,0.65,9.3 +7.0,0.975,0.04,2.0,0.087,12.0,67.0,0.99565,3.35,0.6,9.4 +8.0,0.52,0.03,1.7,0.07,10.0,35.0,0.99575,3.34,0.57,10.0 
+7.9,0.37,0.23,1.8,0.077,23.0,49.0,0.9963,3.28,0.67,9.3 +12.5,0.56,0.49,2.4,0.064,5.0,27.0,0.9999,3.08,0.87,10.9 +11.8,0.26,0.52,1.8,0.071,6.0,10.0,0.9968,3.2,0.72,10.2 +8.1,0.87,0.0,3.3,0.096,26.0,61.0,1.00025,3.6,0.72,9.8 +7.9,0.35,0.46,3.6,0.078,15.0,37.0,0.9973,3.35,0.86,12.8 +6.9,0.54,0.04,3.0,0.077,7.0,27.0,0.9987,3.69,0.91,9.4 +11.5,0.18,0.51,4.0,0.104,4.0,23.0,0.9996,3.28,0.97,10.1 +7.9,0.545,0.06,4.0,0.087,27.0,61.0,0.9965,3.36,0.67,10.7 +11.5,0.18,0.51,4.0,0.104,4.0,23.0,0.9996,3.28,0.97,10.1 +10.9,0.37,0.58,4.0,0.071,17.0,65.0,0.99935,3.22,0.78,10.1 +8.4,0.715,0.2,2.4,0.076,10.0,38.0,0.99735,3.31,0.64,9.4 +7.5,0.65,0.18,7.0,0.088,27.0,94.0,0.99915,3.38,0.77,9.4 +7.9,0.545,0.06,4.0,0.087,27.0,61.0,0.9965,3.36,0.67,10.7 +6.9,0.54,0.04,3.0,0.077,7.0,27.0,0.9987,3.69,0.91,9.4 +11.5,0.18,0.51,4.0,0.104,4.0,23.0,0.9996,3.28,0.97,10.1 +10.3,0.32,0.45,6.4,0.073,5.0,13.0,0.9976,3.23,0.82,12.6 +8.9,0.4,0.32,5.6,0.087,10.0,47.0,0.9991,3.38,0.77,10.5 +11.4,0.26,0.44,3.6,0.071,6.0,19.0,0.9986,3.12,0.82,9.3 +7.7,0.27,0.68,3.5,0.358,5.0,10.0,0.9972,3.25,1.08,9.9 +7.6,0.52,0.12,3.0,0.067,12.0,53.0,0.9971,3.36,0.57,9.1 +8.9,0.4,0.32,5.6,0.087,10.0,47.0,0.9991,3.38,0.77,10.5 +9.9,0.59,0.07,3.4,0.102,32.0,71.0,1.00015,3.31,0.71,9.8 +9.9,0.59,0.07,3.4,0.102,32.0,71.0,1.00015,3.31,0.71,9.8 +12.0,0.45,0.55,2.0,0.073,25.0,49.0,0.9997,3.1,0.76,10.3 +7.5,0.4,0.12,3.0,0.092,29.0,53.0,0.9967,3.37,0.7,10.3 +8.7,0.52,0.09,2.5,0.091,20.0,49.0,0.9976,3.34,0.86,10.6 +11.6,0.42,0.53,3.3,0.105,33.0,98.0,1.001,3.2,0.95,9.2 +8.7,0.52,0.09,2.5,0.091,20.0,49.0,0.9976,3.34,0.86,10.6 +11.0,0.2,0.48,2.0,0.343,6.0,18.0,0.9979,3.3,0.71,10.5 +10.4,0.55,0.23,2.7,0.091,18.0,48.0,0.9994,3.22,0.64,10.3 +6.9,0.36,0.25,2.4,0.098,5.0,16.0,0.9964,3.41,0.6,10.1 +13.3,0.34,0.52,3.2,0.094,17.0,53.0,1.0014,3.05,0.81,9.5 +10.8,0.5,0.46,2.5,0.073,5.0,27.0,1.0001,3.05,0.64,9.5 +10.6,0.83,0.37,2.6,0.086,26.0,70.0,0.9981,3.16,0.52,9.9 +7.1,0.63,0.06,2.0,0.083,8.0,29.0,0.99855,3.67,0.73,9.6 +7.2,0.65,0.02,2.3,0.094,5.0,31.0,0.9993,3.67,0.8,9.7 +6.9,0.67,0.06,2.1,0.08,8.0,33.0,0.99845,3.68,0.71,9.6 +7.5,0.53,0.06,2.6,0.086,20.0,44.0,0.9965,3.38,0.59,10.7 +11.1,0.18,0.48,1.5,0.068,7.0,15.0,0.9973,3.22,0.64,10.1 +8.3,0.705,0.12,2.6,0.092,12.0,28.0,0.9994,3.51,0.72,10.0 +7.4,0.67,0.12,1.6,0.186,5.0,21.0,0.996,3.39,0.54,9.5 +8.4,0.65,0.6,2.1,0.112,12.0,90.0,0.9973,3.2,0.52,9.2 +10.3,0.53,0.48,2.5,0.063,6.0,25.0,0.9998,3.12,0.59,9.3 +7.6,0.62,0.32,2.2,0.082,7.0,54.0,0.9966,3.36,0.52,9.4 +10.3,0.41,0.42,2.4,0.213,6.0,14.0,0.9994,3.19,0.62,9.5 +10.3,0.43,0.44,2.4,0.214,5.0,12.0,0.9994,3.19,0.63,9.5 +7.4,0.29,0.38,1.7,0.062,9.0,30.0,0.9968,3.41,0.53,9.5 +10.3,0.53,0.48,2.5,0.063,6.0,25.0,0.9998,3.12,0.59,9.3 +7.9,0.53,0.24,2.0,0.072,15.0,105.0,0.996,3.27,0.54,9.4 +9.0,0.46,0.31,2.8,0.093,19.0,98.0,0.99815,3.32,0.63,9.5 +8.6,0.47,0.3,3.0,0.076,30.0,135.0,0.9976,3.3,0.53,9.4 +7.4,0.36,0.29,2.6,0.087,26.0,72.0,0.99645,3.39,0.68,11.0 +7.1,0.35,0.29,2.5,0.096,20.0,53.0,0.9962,3.42,0.65,11.0 +9.6,0.56,0.23,3.4,0.102,37.0,92.0,0.9996,3.3,0.65,10.1 +9.6,0.77,0.12,2.9,0.082,30.0,74.0,0.99865,3.3,0.64,10.4 +9.8,0.66,0.39,3.2,0.083,21.0,59.0,0.9989,3.37,0.71,11.5 +9.6,0.77,0.12,2.9,0.082,30.0,74.0,0.99865,3.3,0.64,10.4 +9.8,0.66,0.39,3.2,0.083,21.0,59.0,0.9989,3.37,0.71,11.5 +9.3,0.61,0.26,3.4,0.09,25.0,87.0,0.99975,3.24,0.62,9.7 +7.8,0.62,0.05,2.3,0.079,6.0,18.0,0.99735,3.29,0.63,9.3 +10.3,0.59,0.42,2.8,0.09,35.0,73.0,0.999,3.28,0.7,9.5 +10.0,0.49,0.2,11.0,0.071,13.0,50.0,1.0015,3.16,0.69,9.2 +10.0,0.49,0.2,11.0,0.071,13.0,50.0,1.0015,3.16,0.69,9.2 
+11.6,0.53,0.66,3.65,0.121,6.0,14.0,0.9978,3.05,0.74,11.5 +10.3,0.44,0.5,4.5,0.107,5.0,13.0,0.998,3.28,0.83,11.5 +13.4,0.27,0.62,2.6,0.082,6.0,21.0,1.0002,3.16,0.67,9.7 +10.7,0.46,0.39,2.0,0.061,7.0,15.0,0.9981,3.18,0.62,9.5 +10.2,0.36,0.64,2.9,0.122,10.0,41.0,0.998,3.23,0.66,12.5 +10.2,0.36,0.64,2.9,0.122,10.0,41.0,0.998,3.23,0.66,12.5 +8.0,0.58,0.28,3.2,0.066,21.0,114.0,0.9973,3.22,0.54,9.4 +8.4,0.56,0.08,2.1,0.105,16.0,44.0,0.9958,3.13,0.52,11.0 +7.9,0.65,0.01,2.5,0.078,17.0,38.0,0.9963,3.34,0.74,11.7 +11.9,0.695,0.53,3.4,0.128,7.0,21.0,0.9992,3.17,0.84,12.2 +8.9,0.43,0.45,1.9,0.052,6.0,16.0,0.9948,3.35,0.7,12.5 +7.8,0.43,0.32,2.8,0.08,29.0,58.0,0.9974,3.31,0.64,10.3 +12.4,0.49,0.58,3.0,0.103,28.0,99.0,1.0008,3.16,1.0,11.5 +12.5,0.28,0.54,2.3,0.082,12.0,29.0,0.9997,3.11,1.36,9.8 +12.2,0.34,0.5,2.4,0.066,10.0,21.0,1.0,3.12,1.18,9.2 +10.6,0.42,0.48,2.7,0.065,5.0,18.0,0.9972,3.21,0.87,11.3 +10.9,0.39,0.47,1.8,0.118,6.0,14.0,0.9982,3.3,0.75,9.8 +10.9,0.39,0.47,1.8,0.118,6.0,14.0,0.9982,3.3,0.75,9.8 +11.9,0.57,0.5,2.6,0.082,6.0,32.0,1.0006,3.12,0.78,10.7 +7.0,0.685,0.0,1.9,0.067,40.0,63.0,0.9979,3.6,0.81,9.9 +6.6,0.815,0.02,2.7,0.072,17.0,34.0,0.9955,3.58,0.89,12.3 +13.8,0.49,0.67,3.0,0.093,6.0,15.0,0.9986,3.02,0.93,12.0 +9.6,0.56,0.31,2.8,0.089,15.0,46.0,0.9979,3.11,0.92,10.0 +9.1,0.785,0.0,2.6,0.093,11.0,28.0,0.9994,3.36,0.86,9.4 +10.7,0.67,0.22,2.7,0.107,17.0,34.0,1.0004,3.28,0.98,9.9 +9.1,0.795,0.0,2.6,0.096,11.0,26.0,0.9994,3.35,0.83,9.4 +7.7,0.665,0.0,2.4,0.09,8.0,19.0,0.9974,3.27,0.73,9.3 +13.5,0.53,0.79,4.8,0.12,23.0,77.0,1.0018,3.18,0.77,13.0 +6.1,0.21,0.4,1.4,0.066,40.5,165.0,0.9912,3.25,0.59,11.9 +6.7,0.75,0.01,2.4,0.078,17.0,32.0,0.9955,3.55,0.61,12.8 +11.5,0.41,0.52,3.0,0.08,29.0,55.0,1.0001,3.26,0.88,11.0 +10.5,0.42,0.66,2.95,0.116,12.0,29.0,0.997,3.24,0.75,11.7 +11.9,0.43,0.66,3.1,0.109,10.0,23.0,1.0,3.15,0.85,10.4 +12.6,0.38,0.66,2.6,0.088,10.0,41.0,1.001,3.17,0.68,9.8 +8.2,0.7,0.23,2.0,0.099,14.0,81.0,0.9973,3.19,0.7,9.4 +8.6,0.45,0.31,2.6,0.086,21.0,50.0,0.9982,3.37,0.91,9.9 +11.9,0.58,0.66,2.5,0.072,6.0,37.0,0.9992,3.05,0.56,10.0 +12.5,0.46,0.63,2.0,0.071,6.0,15.0,0.9988,2.99,0.87,10.2 +12.8,0.615,0.66,5.8,0.083,7.0,42.0,1.0022,3.07,0.73,10.0 +10.0,0.42,0.5,3.4,0.107,7.0,21.0,0.9979,3.26,0.93,11.8 +12.8,0.615,0.66,5.8,0.083,7.0,42.0,1.0022,3.07,0.73,10.0 +10.4,0.575,0.61,2.6,0.076,11.0,24.0,1.0,3.16,0.69,9.0 +10.3,0.34,0.52,2.8,0.159,15.0,75.0,0.9998,3.18,0.64,9.4 +9.4,0.27,0.53,2.4,0.074,6.0,18.0,0.9962,3.2,1.13,12.0 +6.9,0.765,0.02,2.3,0.063,35.0,63.0,0.9975,3.57,0.78,9.9 +7.9,0.24,0.4,1.6,0.056,11.0,25.0,0.9967,3.32,0.87,8.7 +9.1,0.28,0.48,1.8,0.067,26.0,46.0,0.9967,3.32,1.04,10.6 +7.4,0.55,0.22,2.2,0.106,12.0,72.0,0.9959,3.05,0.63,9.2 +14.0,0.41,0.63,3.8,0.089,6.0,47.0,1.0014,3.01,0.81,10.8 +11.5,0.54,0.71,4.4,0.124,6.0,15.0,0.9984,3.01,0.83,11.8 +11.5,0.45,0.5,3.0,0.078,19.0,47.0,1.0003,3.26,1.11,11.0 +9.4,0.27,0.53,2.4,0.074,6.0,18.0,0.9962,3.2,1.13,12.0 +11.4,0.625,0.66,6.2,0.088,6.0,24.0,0.9988,3.11,0.99,13.3 +8.3,0.42,0.38,2.5,0.094,24.0,60.0,0.9979,3.31,0.7,10.8 +8.3,0.26,0.42,2.0,0.08,11.0,27.0,0.9974,3.21,0.8,9.4 +13.7,0.415,0.68,2.9,0.085,17.0,43.0,1.0014,3.06,0.8,10.0 +8.3,0.26,0.42,2.0,0.08,11.0,27.0,0.9974,3.21,0.8,9.4 +8.3,0.26,0.42,2.0,0.08,11.0,27.0,0.9974,3.21,0.8,9.4 +7.7,0.51,0.28,2.1,0.087,23.0,54.0,0.998,3.42,0.74,9.2 +7.4,0.63,0.07,2.4,0.09,11.0,37.0,0.9979,3.43,0.76,9.7 +7.8,0.54,0.26,2.0,0.088,23.0,48.0,0.9981,3.41,0.74,9.2 +8.3,0.66,0.15,1.9,0.079,17.0,42.0,0.9972,3.31,0.54,9.6 +7.8,0.46,0.26,1.9,0.088,23.0,53.0,0.9981,3.43,0.74,9.2 
+9.6,0.38,0.31,2.5,0.096,16.0,49.0,0.9982,3.19,0.7,10.0 +5.6,0.85,0.05,1.4,0.045,12.0,88.0,0.9924,3.56,0.82,12.9 +13.7,0.415,0.68,2.9,0.085,17.0,43.0,1.0014,3.06,0.8,10.0 +9.5,0.37,0.52,2.0,0.082,6.0,26.0,0.998,3.18,0.51,9.5 +8.4,0.665,0.61,2.0,0.112,13.0,95.0,0.997,3.16,0.54,9.1 +12.7,0.6,0.65,2.3,0.063,6.0,25.0,0.9997,3.03,0.57,9.9 +12.0,0.37,0.76,4.2,0.066,7.0,38.0,1.0004,3.22,0.6,13.0 +6.6,0.735,0.02,7.9,0.122,68.0,124.0,0.9994,3.47,0.53,9.9 +11.5,0.59,0.59,2.6,0.087,13.0,49.0,0.9988,3.18,0.65,11.0 +11.5,0.59,0.59,2.6,0.087,13.0,49.0,0.9988,3.18,0.65,11.0 +8.7,0.765,0.22,2.3,0.064,9.0,42.0,0.9963,3.1,0.55,9.4 +6.6,0.735,0.02,7.9,0.122,68.0,124.0,0.9994,3.47,0.53,9.9 +7.7,0.26,0.3,1.7,0.059,20.0,38.0,0.9949,3.29,0.47,10.8 +12.2,0.48,0.54,2.6,0.085,19.0,64.0,1.0,3.1,0.61,10.5 +11.4,0.6,0.49,2.7,0.085,10.0,41.0,0.9994,3.15,0.63,10.5 +7.7,0.69,0.05,2.7,0.075,15.0,27.0,0.9974,3.26,0.61,9.1 +8.7,0.31,0.46,1.4,0.059,11.0,25.0,0.9966,3.36,0.76,10.1 +9.8,0.44,0.47,2.5,0.063,9.0,28.0,0.9981,3.24,0.65,10.8 +12.0,0.39,0.66,3.0,0.093,12.0,30.0,0.9996,3.18,0.63,10.8 +10.4,0.34,0.58,3.7,0.174,6.0,16.0,0.997,3.19,0.7,11.3 +12.5,0.46,0.49,4.5,0.07,26.0,49.0,0.9981,3.05,0.57,9.6 +9.0,0.43,0.34,2.5,0.08,26.0,86.0,0.9987,3.38,0.62,9.5 +9.1,0.45,0.35,2.4,0.08,23.0,78.0,0.9987,3.38,0.62,9.5 +7.1,0.735,0.16,1.9,0.1,15.0,77.0,0.9966,3.27,0.64,9.3 +9.9,0.4,0.53,6.7,0.097,6.0,19.0,0.9986,3.27,0.82,11.7 +8.8,0.52,0.34,2.7,0.087,24.0,122.0,0.9982,3.26,0.61,9.5 +8.6,0.725,0.24,6.6,0.117,31.0,134.0,1.0014,3.32,1.07,9.3 +10.6,0.48,0.64,2.2,0.111,6.0,20.0,0.997,3.26,0.66,11.7 +7.0,0.58,0.12,1.9,0.091,34.0,124.0,0.9956,3.44,0.48,10.5 +11.9,0.38,0.51,2.0,0.121,7.0,20.0,0.9996,3.24,0.76,10.4 +6.8,0.77,0.0,1.8,0.066,34.0,52.0,0.9976,3.62,0.68,9.9 +9.5,0.56,0.33,2.4,0.089,35.0,67.0,0.9972,3.28,0.73,11.8 +6.6,0.84,0.03,2.3,0.059,32.0,48.0,0.9952,3.52,0.56,12.3 +7.7,0.96,0.2,2.0,0.047,15.0,60.0,0.9955,3.36,0.44,10.9 +10.5,0.24,0.47,2.1,0.066,6.0,24.0,0.9978,3.15,0.9,11.0 +7.7,0.96,0.2,2.0,0.047,15.0,60.0,0.9955,3.36,0.44,10.9 +6.6,0.84,0.03,2.3,0.059,32.0,48.0,0.9952,3.52,0.56,12.3 +6.4,0.67,0.08,2.1,0.045,19.0,48.0,0.9949,3.49,0.49,11.4 +9.5,0.78,0.22,1.9,0.077,6.0,32.0,0.9988,3.26,0.56,10.6 +9.1,0.52,0.33,1.3,0.07,9.0,30.0,0.9978,3.24,0.6,9.3 +12.8,0.84,0.63,2.4,0.088,13.0,35.0,0.9997,3.1,0.6,10.4 +10.5,0.24,0.47,2.1,0.066,6.0,24.0,0.9978,3.15,0.9,11.0 +7.8,0.55,0.35,2.2,0.074,21.0,66.0,0.9974,3.25,0.56,9.2 +11.9,0.37,0.69,2.3,0.078,12.0,24.0,0.9958,3.0,0.65,12.8 +12.3,0.39,0.63,2.3,0.091,6.0,18.0,1.0004,3.16,0.49,9.5 +10.4,0.41,0.55,3.2,0.076,22.0,54.0,0.9996,3.15,0.89,9.9 +12.3,0.39,0.63,2.3,0.091,6.0,18.0,1.0004,3.16,0.49,9.5 +8.0,0.67,0.3,2.0,0.06,38.0,62.0,0.9958,3.26,0.56,10.2 +11.1,0.45,0.73,3.2,0.066,6.0,22.0,0.9986,3.17,0.66,11.2 +10.4,0.41,0.55,3.2,0.076,22.0,54.0,0.9996,3.15,0.89,9.9 +7.0,0.62,0.18,1.5,0.062,7.0,50.0,0.9951,3.08,0.6,9.3 +12.6,0.31,0.72,2.2,0.072,6.0,29.0,0.9987,2.88,0.82,9.8 +11.9,0.4,0.65,2.15,0.068,7.0,27.0,0.9988,3.06,0.68,11.3 +15.6,0.685,0.76,3.7,0.1,6.0,43.0,1.0032,2.95,0.68,11.2 +10.0,0.44,0.49,2.7,0.077,11.0,19.0,0.9963,3.23,0.63,11.6 +5.3,0.57,0.01,1.7,0.054,5.0,27.0,0.9934,3.57,0.84,12.5 +9.5,0.735,0.1,2.1,0.079,6.0,31.0,0.9986,3.23,0.56,10.1 +12.5,0.38,0.6,2.6,0.081,31.0,72.0,0.9996,3.1,0.73,10.5 +9.3,0.48,0.29,2.1,0.127,6.0,16.0,0.9968,3.22,0.72,11.2 +8.6,0.53,0.22,2.0,0.1,7.0,27.0,0.9967,3.2,0.56,10.2 +11.9,0.39,0.69,2.8,0.095,17.0,35.0,0.9994,3.1,0.61,10.8 +11.9,0.39,0.69,2.8,0.095,17.0,35.0,0.9994,3.1,0.61,10.8 +8.4,0.37,0.53,1.8,0.413,9.0,26.0,0.9979,3.06,1.06,9.1 
+6.8,0.56,0.03,1.7,0.084,18.0,35.0,0.9968,3.44,0.63,10.0 +10.4,0.33,0.63,2.8,0.084,5.0,22.0,0.9998,3.26,0.74,11.2 +7.0,0.23,0.4,1.6,0.063,21.0,67.0,0.9952,3.5,0.63,11.1 +11.3,0.62,0.67,5.2,0.086,6.0,19.0,0.9988,3.22,0.69,13.4 +8.9,0.59,0.39,2.3,0.095,5.0,22.0,0.9986,3.37,0.58,10.3 +9.2,0.63,0.21,2.7,0.097,29.0,65.0,0.9988,3.28,0.58,9.6 +10.4,0.33,0.63,2.8,0.084,5.0,22.0,0.9998,3.26,0.74,11.2 +11.6,0.58,0.66,2.2,0.074,10.0,47.0,1.0008,3.25,0.57,9.0 +9.2,0.43,0.52,2.3,0.083,14.0,23.0,0.9976,3.35,0.61,11.3 +8.3,0.615,0.22,2.6,0.087,6.0,19.0,0.9982,3.26,0.61,9.3 +11.0,0.26,0.68,2.55,0.085,10.0,25.0,0.997,3.18,0.61,11.8 +8.1,0.66,0.7,2.2,0.098,25.0,129.0,0.9972,3.08,0.53,9.0 +11.5,0.315,0.54,2.1,0.084,5.0,15.0,0.9987,2.98,0.7,9.2 +10.0,0.29,0.4,2.9,0.098,10.0,26.0,1.0006,3.48,0.91,9.7 +10.3,0.5,0.42,2.0,0.069,21.0,51.0,0.9982,3.16,0.72,11.5 +8.8,0.46,0.45,2.6,0.065,7.0,18.0,0.9947,3.32,0.79,14.0 +11.4,0.36,0.69,2.1,0.09,6.0,21.0,1.0,3.17,0.62,9.2 +8.7,0.82,0.02,1.2,0.07,36.0,48.0,0.9952,3.2,0.58,9.8 +13.0,0.32,0.65,2.6,0.093,15.0,47.0,0.9996,3.05,0.61,10.6 +9.6,0.54,0.42,2.4,0.081,25.0,52.0,0.997,3.2,0.71,11.4 +12.5,0.37,0.55,2.6,0.083,25.0,68.0,0.9995,3.15,0.82,10.4 +9.9,0.35,0.55,2.1,0.062,5.0,14.0,0.9971,3.26,0.79,10.6 +10.5,0.28,0.51,1.7,0.08,10.0,24.0,0.9982,3.2,0.89,9.4 +9.6,0.68,0.24,2.2,0.087,5.0,28.0,0.9988,3.14,0.6,10.2 +9.3,0.27,0.41,2.0,0.091,6.0,16.0,0.998,3.28,0.7,9.7 +10.4,0.24,0.49,1.8,0.075,6.0,20.0,0.9977,3.18,1.06,11.0 +9.6,0.68,0.24,2.2,0.087,5.0,28.0,0.9988,3.14,0.6,10.2 +9.4,0.685,0.11,2.7,0.077,6.0,31.0,0.9984,3.19,0.7,10.1 +10.6,0.28,0.39,15.5,0.069,6.0,23.0,1.0026,3.12,0.66,9.2 +9.4,0.3,0.56,2.8,0.08,6.0,17.0,0.9964,3.15,0.92,11.7 +10.6,0.36,0.59,2.2,0.152,6.0,18.0,0.9986,3.04,1.05,9.4 +10.6,0.36,0.6,2.2,0.152,7.0,18.0,0.9986,3.04,1.06,9.4 +10.6,0.44,0.68,4.1,0.114,6.0,24.0,0.997,3.06,0.66,13.4 +10.2,0.67,0.39,1.9,0.054,6.0,17.0,0.9976,3.17,0.47,10.0 +10.2,0.67,0.39,1.9,0.054,6.0,17.0,0.9976,3.17,0.47,10.0 +10.2,0.645,0.36,1.8,0.053,5.0,14.0,0.9982,3.17,0.42,10.0 +11.6,0.32,0.55,2.8,0.081,35.0,67.0,1.0002,3.32,0.92,10.8 +9.3,0.39,0.4,2.6,0.073,10.0,26.0,0.9984,3.34,0.75,10.2 +9.3,0.775,0.27,2.8,0.078,24.0,56.0,0.9984,3.31,0.67,10.6 +9.2,0.41,0.5,2.5,0.055,12.0,25.0,0.9952,3.34,0.79,13.3 +8.9,0.4,0.51,2.6,0.052,13.0,27.0,0.995,3.32,0.9,13.4 +8.7,0.69,0.31,3.0,0.086,23.0,81.0,1.0002,3.48,0.74,11.6 +6.5,0.39,0.23,8.3,0.051,28.0,91.0,0.9952,3.44,0.55,12.1 +10.7,0.35,0.53,2.6,0.07,5.0,16.0,0.9972,3.15,0.65,11.0 +7.8,0.52,0.25,1.9,0.081,14.0,38.0,0.9984,3.43,0.65,9.0 +7.2,0.34,0.32,2.5,0.09,43.0,113.0,0.9966,3.32,0.79,11.1 +10.7,0.35,0.53,2.6,0.07,5.0,16.0,0.9972,3.15,0.65,11.0 +8.7,0.69,0.31,3.0,0.086,23.0,81.0,1.0002,3.48,0.74,11.6 +7.8,0.52,0.25,1.9,0.081,14.0,38.0,0.9984,3.43,0.65,9.0 +10.4,0.44,0.73,6.55,0.074,38.0,76.0,0.999,3.17,0.85,12.0 +10.4,0.44,0.73,6.55,0.074,38.0,76.0,0.999,3.17,0.85,12.0 +10.5,0.26,0.47,1.9,0.078,6.0,24.0,0.9976,3.18,1.04,10.9 +10.5,0.24,0.42,1.8,0.077,6.0,22.0,0.9976,3.21,1.05,10.8 +10.2,0.49,0.63,2.9,0.072,10.0,26.0,0.9968,3.16,0.78,12.5 +10.4,0.24,0.46,1.8,0.075,6.0,21.0,0.9976,3.25,1.02,10.8 +11.2,0.67,0.55,2.3,0.084,6.0,13.0,1.0,3.17,0.71,9.5 +10.0,0.59,0.31,2.2,0.09,26.0,62.0,0.9994,3.18,0.63,10.2 +13.3,0.29,0.75,2.8,0.084,23.0,43.0,0.9986,3.04,0.68,11.4 +12.4,0.42,0.49,4.6,0.073,19.0,43.0,0.9978,3.02,0.61,9.5 +10.0,0.59,0.31,2.2,0.09,26.0,62.0,0.9994,3.18,0.63,10.2 +10.7,0.4,0.48,2.1,0.125,15.0,49.0,0.998,3.03,0.81,9.7 +10.5,0.51,0.64,2.4,0.107,6.0,15.0,0.9973,3.09,0.66,11.8 +10.5,0.51,0.64,2.4,0.107,6.0,15.0,0.9973,3.09,0.66,11.8 
+8.5,0.655,0.49,6.1,0.122,34.0,151.0,1.001,3.31,1.14,9.3 +12.5,0.6,0.49,4.3,0.1,5.0,14.0,1.001,3.25,0.74,11.9 +10.4,0.61,0.49,2.1,0.2,5.0,16.0,0.9994,3.16,0.63,8.4 +10.9,0.21,0.49,2.8,0.088,11.0,32.0,0.9972,3.22,0.68,11.7 +7.3,0.365,0.49,2.5,0.088,39.0,106.0,0.9966,3.36,0.78,11.0 +9.8,0.25,0.49,2.7,0.088,15.0,33.0,0.9982,3.42,0.9,10.0 +7.6,0.41,0.49,2.0,0.088,16.0,43.0,0.998,3.48,0.64,9.1 +8.2,0.39,0.49,2.3,0.099,47.0,133.0,0.9979,3.38,0.99,9.8 +9.3,0.4,0.49,2.5,0.085,38.0,142.0,0.9978,3.22,0.55,9.4 +9.2,0.43,0.49,2.4,0.086,23.0,116.0,0.9976,3.23,0.64,9.5 +10.4,0.64,0.24,2.8,0.105,29.0,53.0,0.9998,3.24,0.67,9.9 +7.3,0.365,0.49,2.5,0.088,39.0,106.0,0.9966,3.36,0.78,11.0 +7.0,0.38,0.49,2.5,0.097,33.0,85.0,0.9962,3.39,0.77,11.4 +8.2,0.42,0.49,2.6,0.084,32.0,55.0,0.9988,3.34,0.75,8.7 +9.9,0.63,0.24,2.4,0.077,6.0,33.0,0.9974,3.09,0.57,9.4 +9.1,0.22,0.24,2.1,0.078,1.0,28.0,0.999,3.41,0.87,10.3 +11.9,0.38,0.49,2.7,0.098,12.0,42.0,1.0004,3.16,0.61,10.3 +11.9,0.38,0.49,2.7,0.098,12.0,42.0,1.0004,3.16,0.61,10.3 +10.3,0.27,0.24,2.1,0.072,15.0,33.0,0.9956,3.22,0.66,12.8 +10.0,0.48,0.24,2.7,0.102,13.0,32.0,1.0,3.28,0.56,10.0 +9.1,0.22,0.24,2.1,0.078,1.0,28.0,0.999,3.41,0.87,10.3 +9.9,0.63,0.24,2.4,0.077,6.0,33.0,0.9974,3.09,0.57,9.4 +8.1,0.825,0.24,2.1,0.084,5.0,13.0,0.9972,3.37,0.77,10.7 +12.9,0.35,0.49,5.8,0.066,5.0,35.0,1.0014,3.2,0.66,12.0 +11.2,0.5,0.74,5.15,0.1,5.0,17.0,0.9996,3.22,0.62,11.2 +9.2,0.59,0.24,3.3,0.101,20.0,47.0,0.9988,3.26,0.67,9.6 +9.5,0.46,0.49,6.3,0.064,5.0,17.0,0.9988,3.21,0.73,11.0 +9.3,0.715,0.24,2.1,0.07,5.0,20.0,0.9966,3.12,0.59,9.9 +11.2,0.66,0.24,2.5,0.085,16.0,53.0,0.9993,3.06,0.72,11.0 +14.3,0.31,0.74,1.8,0.075,6.0,15.0,1.0008,2.86,0.79,8.4 +9.1,0.47,0.49,2.6,0.094,38.0,106.0,0.9982,3.08,0.59,9.1 +7.5,0.55,0.24,2.0,0.078,10.0,28.0,0.9983,3.45,0.78,9.5 +10.6,0.31,0.49,2.5,0.067,6.0,21.0,0.9987,3.26,0.86,10.7 +12.4,0.35,0.49,2.6,0.079,27.0,69.0,0.9994,3.12,0.75,10.4 +9.0,0.53,0.49,1.9,0.171,6.0,25.0,0.9975,3.27,0.61,9.4 +6.8,0.51,0.01,2.1,0.074,9.0,25.0,0.9958,3.33,0.56,9.5 +9.4,0.43,0.24,2.8,0.092,14.0,45.0,0.998,3.19,0.73,10.0 +9.5,0.46,0.24,2.7,0.092,14.0,44.0,0.998,3.12,0.74,10.0 +5.0,1.04,0.24,1.6,0.05,32.0,96.0,0.9934,3.74,0.62,11.5 +15.5,0.645,0.49,4.2,0.095,10.0,23.0,1.00315,2.92,0.74,11.1 +15.5,0.645,0.49,4.2,0.095,10.0,23.0,1.00315,2.92,0.74,11.1 +10.9,0.53,0.49,4.6,0.118,10.0,17.0,1.0002,3.07,0.56,11.7 +15.6,0.645,0.49,4.2,0.095,10.0,23.0,1.00315,2.92,0.74,11.1 +10.9,0.53,0.49,4.6,0.118,10.0,17.0,1.0002,3.07,0.56,11.7 +13.0,0.47,0.49,4.3,0.085,6.0,47.0,1.0021,3.3,0.68,12.7 +12.7,0.6,0.49,2.8,0.075,5.0,19.0,0.9994,3.14,0.57,11.4 +9.0,0.44,0.49,2.4,0.078,26.0,121.0,0.9978,3.23,0.58,9.2 +9.0,0.54,0.49,2.9,0.094,41.0,110.0,0.9982,3.08,0.61,9.2 +7.6,0.29,0.49,2.7,0.092,25.0,60.0,0.9971,3.31,0.61,10.1 +13.0,0.47,0.49,4.3,0.085,6.0,47.0,1.0021,3.3,0.68,12.7 +12.7,0.6,0.49,2.8,0.075,5.0,19.0,0.9994,3.14,0.57,11.4 +8.7,0.7,0.24,2.5,0.226,5.0,15.0,0.9991,3.32,0.6,9.0 +8.7,0.7,0.24,2.5,0.226,5.0,15.0,0.9991,3.32,0.6,9.0 +9.8,0.5,0.49,2.6,0.25,5.0,20.0,0.999,3.31,0.79,10.7 +6.2,0.36,0.24,2.2,0.095,19.0,42.0,0.9946,3.57,0.57,11.7 +11.5,0.35,0.49,3.3,0.07,10.0,37.0,1.0003,3.32,0.91,11.0 +6.2,0.36,0.24,2.2,0.095,19.0,42.0,0.9946,3.57,0.57,11.7 +10.2,0.24,0.49,2.4,0.075,10.0,28.0,0.9978,3.14,0.61,10.4 +10.5,0.59,0.49,2.1,0.07,14.0,47.0,0.9991,3.3,0.56,9.6 +10.6,0.34,0.49,3.2,0.078,20.0,78.0,0.9992,3.19,0.7,10.0 +12.3,0.27,0.49,3.1,0.079,28.0,46.0,0.9993,3.2,0.8,10.2 +9.9,0.5,0.24,2.3,0.103,6.0,14.0,0.9978,3.34,0.52,10.0 +8.8,0.44,0.49,2.8,0.083,18.0,111.0,0.9982,3.3,0.6,9.5 
+8.8,0.47,0.49,2.9,0.085,17.0,110.0,0.9982,3.29,0.6,9.8 +10.6,0.31,0.49,2.2,0.063,18.0,40.0,0.9976,3.14,0.51,9.8 +12.3,0.5,0.49,2.2,0.089,5.0,14.0,1.0002,3.19,0.44,9.6 +12.3,0.5,0.49,2.2,0.089,5.0,14.0,1.0002,3.19,0.44,9.6 +11.7,0.49,0.49,2.2,0.083,5.0,15.0,1.0,3.19,0.43,9.2 +12.0,0.28,0.49,1.9,0.074,10.0,21.0,0.9976,2.98,0.66,9.9 +11.8,0.33,0.49,3.4,0.093,54.0,80.0,1.0002,3.3,0.76,10.7 +7.6,0.51,0.24,2.4,0.091,8.0,38.0,0.998,3.47,0.66,9.6 +11.1,0.31,0.49,2.7,0.094,16.0,47.0,0.9986,3.12,1.02,10.6 +7.3,0.73,0.24,1.9,0.108,18.0,102.0,0.9967,3.26,0.59,9.3 +5.0,0.42,0.24,2.0,0.06,19.0,50.0,0.9917,3.72,0.74,14.0 +10.2,0.29,0.49,2.6,0.059,5.0,13.0,0.9976,3.05,0.74,10.5 +9.0,0.45,0.49,2.6,0.084,21.0,75.0,0.9987,3.35,0.57,9.7 +6.6,0.39,0.49,1.7,0.07,23.0,149.0,0.9922,3.12,0.5,11.5 +9.0,0.45,0.49,2.6,0.084,21.0,75.0,0.9987,3.35,0.57,9.7 +9.9,0.49,0.58,3.5,0.094,9.0,43.0,1.0004,3.29,0.58,9.0 +7.9,0.72,0.17,2.6,0.096,20.0,38.0,0.9978,3.4,0.53,9.5 +8.9,0.595,0.41,7.9,0.086,30.0,109.0,0.9998,3.27,0.57,9.3 +12.4,0.4,0.51,2.0,0.059,6.0,24.0,0.9994,3.04,0.6,9.3 +11.9,0.58,0.58,1.9,0.071,5.0,18.0,0.998,3.09,0.63,10.0 +8.5,0.585,0.18,2.1,0.078,5.0,30.0,0.9967,3.2,0.48,9.8 +12.7,0.59,0.45,2.3,0.082,11.0,22.0,1.0,3.0,0.7,9.3 +8.2,0.915,0.27,2.1,0.088,7.0,23.0,0.9962,3.26,0.47,10.0 +13.2,0.46,0.52,2.2,0.071,12.0,35.0,1.0006,3.1,0.56,9.0 +7.7,0.835,0.0,2.6,0.081,6.0,14.0,0.9975,3.3,0.52,9.3 +13.2,0.46,0.52,2.2,0.071,12.0,35.0,1.0006,3.1,0.56,9.0 +8.3,0.58,0.13,2.9,0.096,14.0,63.0,0.9984,3.17,0.62,9.1 +8.3,0.6,0.13,2.6,0.085,6.0,24.0,0.9984,3.31,0.59,9.2 +9.4,0.41,0.48,4.6,0.072,10.0,20.0,0.9973,3.34,0.79,12.2 +8.8,0.48,0.41,3.3,0.092,26.0,52.0,0.9982,3.31,0.53,10.5 +10.1,0.65,0.37,5.1,0.11,11.0,65.0,1.0026,3.32,0.64,10.4 +6.3,0.36,0.19,3.2,0.075,15.0,39.0,0.9956,3.56,0.52,12.7 +8.8,0.24,0.54,2.5,0.083,25.0,57.0,0.9983,3.39,0.54,9.2 +13.2,0.38,0.55,2.7,0.081,5.0,16.0,1.0006,2.98,0.54,9.4 +7.5,0.64,0.0,2.4,0.077,18.0,29.0,0.9965,3.32,0.6,10.0 +8.2,0.39,0.38,1.5,0.058,10.0,29.0,0.9962,3.26,0.74,9.8 +9.2,0.755,0.18,2.2,0.148,10.0,103.0,0.9969,2.87,1.36,10.2 +9.6,0.6,0.5,2.3,0.079,28.0,71.0,0.9997,3.5,0.57,9.7 +9.6,0.6,0.5,2.3,0.079,28.0,71.0,0.9997,3.5,0.57,9.7 +11.5,0.31,0.51,2.2,0.079,14.0,28.0,0.9982,3.03,0.93,9.8 +11.4,0.46,0.5,2.7,0.122,4.0,17.0,1.0006,3.13,0.7,10.2 +11.3,0.37,0.41,2.3,0.088,6.0,16.0,0.9988,3.09,0.8,9.3 +8.3,0.54,0.24,3.4,0.076,16.0,112.0,0.9976,3.27,0.61,9.4 +8.2,0.56,0.23,3.4,0.078,14.0,104.0,0.9976,3.28,0.62,9.4 +10.0,0.58,0.22,1.9,0.08,9.0,32.0,0.9974,3.13,0.55,9.5 +7.9,0.51,0.25,2.9,0.077,21.0,45.0,0.9974,3.49,0.96,12.1 +6.8,0.69,0.0,5.6,0.124,21.0,58.0,0.9997,3.46,0.72,10.2 +6.8,0.69,0.0,5.6,0.124,21.0,58.0,0.9997,3.46,0.72,10.2 +8.8,0.6,0.29,2.2,0.098,5.0,15.0,0.9988,3.36,0.49,9.1 +8.8,0.6,0.29,2.2,0.098,5.0,15.0,0.9988,3.36,0.49,9.1 +8.7,0.54,0.26,2.5,0.097,7.0,31.0,0.9976,3.27,0.6,9.3 +7.6,0.685,0.23,2.3,0.111,20.0,84.0,0.9964,3.21,0.61,9.3 +8.7,0.54,0.26,2.5,0.097,7.0,31.0,0.9976,3.27,0.6,9.3 +10.4,0.28,0.54,2.7,0.105,5.0,19.0,0.9988,3.25,0.63,9.5 +7.6,0.41,0.14,3.0,0.087,21.0,43.0,0.9964,3.32,0.57,10.5 +10.1,0.935,0.22,3.4,0.105,11.0,86.0,1.001,3.43,0.64,11.3 +7.9,0.35,0.21,1.9,0.073,46.0,102.0,0.9964,3.27,0.58,9.5 +8.7,0.84,0.0,1.4,0.065,24.0,33.0,0.9954,3.27,0.55,9.7 +9.6,0.88,0.28,2.4,0.086,30.0,147.0,0.9979,3.24,0.53,9.4 +9.5,0.885,0.27,2.3,0.084,31.0,145.0,0.9978,3.24,0.53,9.4 +7.7,0.915,0.12,2.2,0.143,7.0,23.0,0.9964,3.35,0.65,10.2 +8.9,0.29,0.35,1.9,0.067,25.0,57.0,0.997,3.18,1.36,10.3 +9.9,0.54,0.45,2.3,0.071,16.0,40.0,0.9991,3.39,0.62,9.4 
+9.5,0.59,0.44,2.3,0.071,21.0,68.0,0.9992,3.46,0.63,9.5 +9.9,0.54,0.45,2.3,0.071,16.0,40.0,0.9991,3.39,0.62,9.4 +9.5,0.59,0.44,2.3,0.071,21.0,68.0,0.9992,3.46,0.63,9.5 +9.9,0.54,0.45,2.3,0.071,16.0,40.0,0.9991,3.39,0.62,9.4 +7.8,0.64,0.1,6.0,0.115,5.0,11.0,0.9984,3.37,0.69,10.1 +7.3,0.67,0.05,3.6,0.107,6.0,20.0,0.9972,3.4,0.63,10.1 +8.3,0.845,0.01,2.2,0.07,5.0,14.0,0.9967,3.32,0.58,11.0 +8.7,0.48,0.3,2.8,0.066,10.0,28.0,0.9964,3.33,0.67,11.2 +6.7,0.42,0.27,8.6,0.068,24.0,148.0,0.9948,3.16,0.57,11.3 +10.7,0.43,0.39,2.2,0.106,8.0,32.0,0.9986,2.89,0.5,9.6 +9.8,0.88,0.25,2.5,0.104,35.0,155.0,1.001,3.41,0.67,11.2 +15.9,0.36,0.65,7.5,0.096,22.0,71.0,0.9976,2.98,0.84,14.9 +9.4,0.33,0.59,2.8,0.079,9.0,30.0,0.9976,3.12,0.54,12.0 +8.6,0.47,0.47,2.4,0.074,7.0,29.0,0.9979,3.08,0.46,9.5 +9.7,0.55,0.17,2.9,0.087,20.0,53.0,1.0004,3.14,0.61,9.4 +10.7,0.43,0.39,2.2,0.106,8.0,32.0,0.9986,2.89,0.5,9.6 +12.0,0.5,0.59,1.4,0.073,23.0,42.0,0.998,2.92,0.68,10.5 +7.2,0.52,0.07,1.4,0.074,5.0,20.0,0.9973,3.32,0.81,9.6 +7.1,0.84,0.02,4.4,0.096,5.0,13.0,0.997,3.41,0.57,11.0 +7.2,0.52,0.07,1.4,0.074,5.0,20.0,0.9973,3.32,0.81,9.6 +7.5,0.42,0.31,1.6,0.08,15.0,42.0,0.9978,3.31,0.64,9.0 +7.2,0.57,0.06,1.6,0.076,9.0,27.0,0.9972,3.36,0.7,9.6 +10.1,0.28,0.46,1.8,0.05,5.0,13.0,0.9974,3.04,0.79,10.2 +12.1,0.4,0.52,2.0,0.092,15.0,54.0,1.0,3.03,0.66,10.2 +9.4,0.59,0.14,2.0,0.084,25.0,48.0,0.9981,3.14,0.56,9.7 +8.3,0.49,0.36,1.8,0.222,6.0,16.0,0.998,3.18,0.6,9.5 +11.3,0.34,0.45,2.0,0.082,6.0,15.0,0.9988,2.94,0.66,9.2 +10.0,0.73,0.43,2.3,0.059,15.0,31.0,0.9966,3.15,0.57,11.0 +11.3,0.34,0.45,2.0,0.082,6.0,15.0,0.9988,2.94,0.66,9.2 +6.9,0.4,0.24,2.5,0.083,30.0,45.0,0.9959,3.26,0.58,10.0 +8.2,0.73,0.21,1.7,0.074,5.0,13.0,0.9968,3.2,0.52,9.5 +9.8,1.24,0.34,2.0,0.079,32.0,151.0,0.998,3.15,0.53,9.5 +8.2,0.73,0.21,1.7,0.074,5.0,13.0,0.9968,3.2,0.52,9.5 +10.8,0.4,0.41,2.2,0.084,7.0,17.0,0.9984,3.08,0.67,9.3 +9.3,0.41,0.39,2.2,0.064,12.0,31.0,0.9984,3.26,0.65,10.2 +10.8,0.4,0.41,2.2,0.084,7.0,17.0,0.9984,3.08,0.67,9.3 +8.6,0.8,0.11,2.3,0.084,12.0,31.0,0.9979,3.4,0.48,9.9 +8.3,0.78,0.1,2.6,0.081,45.0,87.0,0.9983,3.48,0.53,10.0 +10.8,0.26,0.45,3.3,0.06,20.0,49.0,0.9972,3.13,0.54,9.6 +13.3,0.43,0.58,1.9,0.07,15.0,40.0,1.0004,3.06,0.49,9.0 +8.0,0.45,0.23,2.2,0.094,16.0,29.0,0.9962,3.21,0.49,10.2 +8.5,0.46,0.31,2.25,0.078,32.0,58.0,0.998,3.33,0.54,9.8 +8.1,0.78,0.23,2.6,0.059,5.0,15.0,0.997,3.37,0.56,11.3 +9.8,0.98,0.32,2.3,0.078,35.0,152.0,0.998,3.25,0.48,9.4 +8.1,0.78,0.23,2.6,0.059,5.0,15.0,0.997,3.37,0.56,11.3 +7.1,0.65,0.18,1.8,0.07,13.0,40.0,0.997,3.44,0.6,9.1 +9.1,0.64,0.23,3.1,0.095,13.0,38.0,0.9998,3.28,0.59,9.7 +7.7,0.66,0.04,1.6,0.039,4.0,9.0,0.9962,3.4,0.47,9.4 +8.1,0.38,0.48,1.8,0.157,5.0,17.0,0.9976,3.3,1.05,9.4 +7.4,1.185,0.0,4.25,0.097,5.0,14.0,0.9966,3.63,0.54,10.7 +9.2,0.92,0.24,2.6,0.087,12.0,93.0,0.9998,3.48,0.54,9.8 +8.6,0.49,0.51,2.0,0.422,16.0,62.0,0.9979,3.03,1.17,9.0 +9.0,0.48,0.32,2.8,0.084,21.0,122.0,0.9984,3.32,0.62,9.4 +9.0,0.47,0.31,2.7,0.084,24.0,125.0,0.9984,3.31,0.61,9.4 +5.1,0.47,0.02,1.3,0.034,18.0,44.0,0.9921,3.9,0.62,12.8 +7.0,0.65,0.02,2.1,0.066,8.0,25.0,0.9972,3.47,0.67,9.5 +7.0,0.65,0.02,2.1,0.066,8.0,25.0,0.9972,3.47,0.67,9.5 +9.4,0.615,0.28,3.2,0.087,18.0,72.0,1.0001,3.31,0.53,9.7 +11.8,0.38,0.55,2.1,0.071,5.0,19.0,0.9986,3.11,0.62,10.8 +10.6,1.02,0.43,2.9,0.076,26.0,88.0,0.9984,3.08,0.57,10.1 +7.0,0.65,0.02,2.1,0.066,8.0,25.0,0.9972,3.47,0.67,9.5 +7.0,0.64,0.02,2.1,0.067,9.0,23.0,0.997,3.47,0.67,9.4 +7.5,0.38,0.48,2.6,0.073,22.0,84.0,0.9972,3.32,0.7,9.6 
+9.1,0.765,0.04,1.6,0.078,4.0,14.0,0.998,3.29,0.54,9.7 +8.4,1.035,0.15,6.0,0.073,11.0,54.0,0.999,3.37,0.49,9.9 +7.0,0.78,0.08,2.0,0.093,10.0,19.0,0.9956,3.4,0.47,10.0 +7.4,0.49,0.19,3.0,0.077,16.0,37.0,0.9966,3.37,0.51,10.5 +7.8,0.545,0.12,2.5,0.068,11.0,35.0,0.996,3.34,0.61,11.6 +9.7,0.31,0.47,1.6,0.062,13.0,33.0,0.9983,3.27,0.66,10.0 +10.6,1.025,0.43,2.8,0.08,21.0,84.0,0.9985,3.06,0.57,10.1 +8.9,0.565,0.34,3.0,0.093,16.0,112.0,0.9998,3.38,0.61,9.5 +8.7,0.69,0.0,3.2,0.084,13.0,33.0,0.9992,3.36,0.45,9.4 +8.0,0.43,0.36,2.3,0.075,10.0,48.0,0.9976,3.34,0.46,9.4 +9.9,0.74,0.28,2.6,0.078,21.0,77.0,0.998,3.28,0.51,9.8 +7.2,0.49,0.18,2.7,0.069,13.0,34.0,0.9967,3.29,0.48,9.2 +8.0,0.43,0.36,2.3,0.075,10.0,48.0,0.9976,3.34,0.46,9.4 +7.6,0.46,0.11,2.6,0.079,12.0,49.0,0.9968,3.21,0.57,10.0 +8.4,0.56,0.04,2.0,0.082,10.0,22.0,0.9976,3.22,0.44,9.6 +7.1,0.66,0.0,3.9,0.086,17.0,45.0,0.9976,3.46,0.54,9.5 +8.4,0.56,0.04,2.0,0.082,10.0,22.0,0.9976,3.22,0.44,9.6 +8.9,0.48,0.24,2.85,0.094,35.0,106.0,0.9982,3.1,0.53,9.2 +7.6,0.42,0.08,2.7,0.084,15.0,48.0,0.9968,3.21,0.59,10.0 +7.1,0.31,0.3,2.2,0.053,36.0,127.0,0.9965,2.94,1.62,9.5 +7.5,1.115,0.1,3.1,0.086,5.0,12.0,0.9958,3.54,0.6,11.2 +9.0,0.66,0.17,3.0,0.077,5.0,13.0,0.9976,3.29,0.55,10.4 +8.1,0.72,0.09,2.8,0.084,18.0,49.0,0.9994,3.43,0.72,11.1 +6.4,0.57,0.02,1.8,0.067,4.0,11.0,0.997,3.46,0.68,9.5 +6.4,0.57,0.02,1.8,0.067,4.0,11.0,0.997,3.46,0.68,9.5 +6.4,0.865,0.03,3.2,0.071,27.0,58.0,0.995,3.61,0.49,12.7 +9.5,0.55,0.66,2.3,0.387,12.0,37.0,0.9982,3.17,0.67,9.6 +8.9,0.875,0.13,3.45,0.088,4.0,14.0,0.9994,3.44,0.52,11.5 +7.3,0.835,0.03,2.1,0.092,10.0,19.0,0.9966,3.39,0.47,9.6 +7.0,0.45,0.34,2.7,0.082,16.0,72.0,0.998,3.55,0.6,9.5 +7.7,0.56,0.2,2.0,0.075,9.0,39.0,0.9987,3.48,0.62,9.3 +7.7,0.965,0.1,2.1,0.112,11.0,22.0,0.9963,3.26,0.5,9.5 +7.7,0.965,0.1,2.1,0.112,11.0,22.0,0.9963,3.26,0.5,9.5 +8.2,0.59,0.0,2.5,0.093,19.0,58.0,1.0002,3.5,0.65,9.3 +9.0,0.46,0.23,2.8,0.092,28.0,104.0,0.9983,3.1,0.56,9.2 +9.0,0.69,0.0,2.4,0.088,19.0,38.0,0.999,3.35,0.6,9.3 +8.3,0.76,0.29,4.2,0.075,12.0,16.0,0.9965,3.45,0.68,11.5 +9.2,0.53,0.24,2.6,0.078,28.0,139.0,0.99788,3.21,0.57,9.5 +6.5,0.615,0.0,1.9,0.065,9.0,18.0,0.9972,3.46,0.65,9.2 +11.6,0.41,0.58,2.8,0.096,25.0,101.0,1.00024,3.13,0.53,10.0 +11.1,0.39,0.54,2.7,0.095,21.0,101.0,1.0001,3.13,0.51,9.5 +7.3,0.51,0.18,2.1,0.07,12.0,28.0,0.99768,3.52,0.73,9.5 +8.2,0.34,0.38,2.5,0.08,12.0,57.0,0.9978,3.3,0.47,9.0 +8.6,0.33,0.4,2.6,0.083,16.0,68.0,0.99782,3.3,0.48,9.4 +7.2,0.5,0.18,2.1,0.071,12.0,31.0,0.99761,3.52,0.72,9.6 +7.3,0.51,0.18,2.1,0.07,12.0,28.0,0.99768,3.52,0.73,9.5 +8.3,0.65,0.1,2.9,0.089,17.0,40.0,0.99803,3.29,0.55,9.5 +8.3,0.65,0.1,2.9,0.089,17.0,40.0,0.99803,3.29,0.55,9.5 +7.6,0.54,0.13,2.5,0.097,24.0,66.0,0.99785,3.39,0.61,9.4 +8.3,0.65,0.1,2.9,0.089,17.0,40.0,0.99803,3.29,0.55,9.5 +7.8,0.48,0.68,1.7,0.415,14.0,32.0,0.99656,3.09,1.06,9.1 +7.8,0.91,0.07,1.9,0.058,22.0,47.0,0.99525,3.51,0.43,10.7 +6.3,0.98,0.01,2.0,0.057,15.0,33.0,0.99488,3.6,0.46,11.2 +8.1,0.87,0.0,2.2,0.084,10.0,31.0,0.99656,3.25,0.5,9.8 +8.1,0.87,0.0,2.2,0.084,10.0,31.0,0.99656,3.25,0.5,9.8 +8.8,0.42,0.21,2.5,0.092,33.0,88.0,0.99823,3.19,0.52,9.2 +9.0,0.58,0.25,2.8,0.075,9.0,104.0,0.99779,3.23,0.57,9.7 +9.3,0.655,0.26,2.0,0.096,5.0,35.0,0.99738,3.25,0.42,9.6 +8.8,0.7,0.0,1.7,0.069,8.0,19.0,0.99701,3.31,0.53,10.0 +9.3,0.655,0.26,2.0,0.096,5.0,35.0,0.99738,3.25,0.42,9.6 +9.1,0.68,0.11,2.8,0.093,11.0,44.0,0.99888,3.31,0.55,9.5 +9.2,0.67,0.1,3.0,0.091,12.0,48.0,0.99888,3.31,0.54,9.5 +8.8,0.59,0.18,2.9,0.089,12.0,74.0,0.99738,3.14,0.54,9.4 
+7.5,0.6,0.32,2.7,0.103,13.0,98.0,0.99938,3.45,0.62,9.5 +7.1,0.59,0.02,2.3,0.082,24.0,94.0,0.99744,3.55,0.53,9.7 +7.9,0.72,0.01,1.9,0.076,7.0,32.0,0.99668,3.39,0.54,9.6 +7.1,0.59,0.02,2.3,0.082,24.0,94.0,0.99744,3.55,0.53,9.7 +9.4,0.685,0.26,2.4,0.082,23.0,143.0,0.9978,3.28,0.55,9.4 +9.5,0.57,0.27,2.3,0.082,23.0,144.0,0.99782,3.27,0.55,9.4 +7.9,0.4,0.29,1.8,0.157,1.0,44.0,0.9973,3.3,0.92,9.5 +7.9,0.4,0.3,1.8,0.157,2.0,45.0,0.99727,3.31,0.91,9.5 +7.2,1.0,0.0,3.0,0.102,7.0,16.0,0.99586,3.43,0.46,10.0 +6.9,0.765,0.18,2.4,0.243,5.5,48.0,0.99612,3.4,0.6,10.3 +6.9,0.635,0.17,2.4,0.241,6.0,18.0,0.9961,3.4,0.59,10.3 +8.3,0.43,0.3,3.4,0.079,7.0,34.0,0.99788,3.36,0.61,10.5 +7.1,0.52,0.03,2.6,0.076,21.0,92.0,0.99745,3.5,0.6,9.8 +7.0,0.57,0.0,2.0,0.19,12.0,45.0,0.99676,3.31,0.6,9.4 +6.5,0.46,0.14,2.4,0.114,9.0,37.0,0.99732,3.66,0.65,9.8 +9.0,0.82,0.05,2.4,0.081,26.0,96.0,0.99814,3.36,0.53,10.0 +6.5,0.46,0.14,2.4,0.114,9.0,37.0,0.99732,3.66,0.65,9.8 +7.1,0.59,0.01,2.5,0.077,20.0,85.0,0.99746,3.55,0.59,9.8 +9.9,0.35,0.41,2.3,0.083,11.0,61.0,0.9982,3.21,0.5,9.5 +9.9,0.35,0.41,2.3,0.083,11.0,61.0,0.9982,3.21,0.5,9.5 +10.0,0.56,0.24,2.2,0.079,19.0,58.0,0.9991,3.18,0.56,10.1 +10.0,0.56,0.24,2.2,0.079,19.0,58.0,0.9991,3.18,0.56,10.1 +8.6,0.63,0.17,2.9,0.099,21.0,119.0,0.998,3.09,0.52,9.3 +7.4,0.37,0.43,2.6,0.082,18.0,82.0,0.99708,3.33,0.68,9.7 +8.8,0.64,0.17,2.9,0.084,25.0,130.0,0.99818,3.23,0.54,9.6 +7.1,0.61,0.02,2.5,0.081,17.0,87.0,0.99745,3.48,0.6,9.7 +7.7,0.6,0.0,2.6,0.055,7.0,13.0,0.99639,3.38,0.56,10.8 +10.1,0.27,0.54,2.3,0.065,7.0,26.0,0.99531,3.17,0.53,12.5 +10.8,0.89,0.3,2.6,0.132,7.0,60.0,0.99786,2.99,1.18,10.2 +8.7,0.46,0.31,2.5,0.126,24.0,64.0,0.99746,3.1,0.74,9.6 +9.3,0.37,0.44,1.6,0.038,21.0,42.0,0.99526,3.24,0.81,10.8 +9.4,0.5,0.34,3.6,0.082,5.0,14.0,0.9987,3.29,0.52,10.7 +9.4,0.5,0.34,3.6,0.082,5.0,14.0,0.9987,3.29,0.52,10.7 +7.2,0.61,0.08,4.0,0.082,26.0,108.0,0.99641,3.25,0.51,9.4 +8.6,0.55,0.09,3.3,0.068,8.0,17.0,0.99735,3.23,0.44,10.0 +5.1,0.585,0.0,1.7,0.044,14.0,86.0,0.99264,3.56,0.94,12.9 +7.7,0.56,0.08,2.5,0.114,14.0,46.0,0.9971,3.24,0.66,9.6 +8.4,0.52,0.22,2.7,0.084,4.0,18.0,0.99682,3.26,0.57,9.9 +8.2,0.28,0.4,2.4,0.052,4.0,10.0,0.99356,3.33,0.7,12.8 +8.4,0.25,0.39,2.0,0.041,4.0,10.0,0.99386,3.27,0.71,12.5 +8.2,0.28,0.4,2.4,0.052,4.0,10.0,0.99356,3.33,0.7,12.8 +7.4,0.53,0.12,1.9,0.165,4.0,12.0,0.99702,3.26,0.86,9.2 +7.6,0.48,0.31,2.8,0.07,4.0,15.0,0.99693,3.22,0.55,10.3 +7.3,0.49,0.1,2.6,0.068,4.0,14.0,0.99562,3.3,0.47,10.5 +12.9,0.5,0.55,2.8,0.072,7.0,24.0,1.00012,3.09,0.68,10.9 +10.8,0.45,0.33,2.5,0.099,20.0,38.0,0.99818,3.24,0.71,10.8 +6.9,0.39,0.24,2.1,0.102,4.0,7.0,0.99462,3.44,0.58,11.4 +12.6,0.41,0.54,2.8,0.103,19.0,41.0,0.99939,3.21,0.76,11.3 +10.8,0.45,0.33,2.5,0.099,20.0,38.0,0.99818,3.24,0.71,10.8 +9.8,0.51,0.19,3.2,0.081,8.0,30.0,0.9984,3.23,0.58,10.5 +10.8,0.29,0.42,1.6,0.084,19.0,27.0,0.99545,3.28,0.73,11.9 +7.1,0.715,0.0,2.35,0.071,21.0,47.0,0.99632,3.29,0.45,9.4 +9.1,0.66,0.15,3.2,0.097,9.0,59.0,0.99976,3.28,0.54,9.6 +7.0,0.685,0.0,1.9,0.099,9.0,22.0,0.99606,3.34,0.6,9.7 +4.9,0.42,0.0,2.1,0.048,16.0,42.0,0.99154,3.71,0.74,14.0 +6.7,0.54,0.13,2.0,0.076,15.0,36.0,0.9973,3.61,0.64,9.8 +6.7,0.54,0.13,2.0,0.076,15.0,36.0,0.9973,3.61,0.64,9.8 +7.1,0.48,0.28,2.8,0.068,6.0,16.0,0.99682,3.24,0.53,10.3 +7.1,0.46,0.14,2.8,0.076,15.0,37.0,0.99624,3.36,0.49,10.7 +7.5,0.27,0.34,2.3,0.05,4.0,8.0,0.9951,3.4,0.64,11.0 +7.1,0.46,0.14,2.8,0.076,15.0,37.0,0.99624,3.36,0.49,10.7 +7.8,0.57,0.09,2.3,0.065,34.0,45.0,0.99417,3.46,0.74,12.7 
+5.9,0.61,0.08,2.1,0.071,16.0,24.0,0.99376,3.56,0.77,11.1 +7.5,0.685,0.07,2.5,0.058,5.0,9.0,0.99632,3.38,0.55,10.9 +5.9,0.61,0.08,2.1,0.071,16.0,24.0,0.99376,3.56,0.77,11.1 +10.4,0.44,0.42,1.5,0.145,34.0,48.0,0.99832,3.38,0.86,9.9 +11.6,0.47,0.44,1.6,0.147,36.0,51.0,0.99836,3.38,0.86,9.9 +8.8,0.685,0.26,1.6,0.088,16.0,23.0,0.99694,3.32,0.47,9.4 +7.6,0.665,0.1,1.5,0.066,27.0,55.0,0.99655,3.39,0.51,9.3 +6.7,0.28,0.28,2.4,0.012,36.0,100.0,0.99064,3.26,0.39,11.7 +6.7,0.28,0.28,2.4,0.012,36.0,100.0,0.99064,3.26,0.39,11.7 +10.1,0.31,0.35,1.6,0.075,9.0,28.0,0.99672,3.24,0.83,11.2 +6.0,0.5,0.04,2.2,0.092,13.0,26.0,0.99647,3.46,0.47,10.0 +11.1,0.42,0.47,2.65,0.085,9.0,34.0,0.99736,3.24,0.77,12.1 +6.6,0.66,0.0,3.0,0.115,21.0,31.0,0.99629,3.45,0.63,10.3 +10.6,0.5,0.45,2.6,0.119,34.0,68.0,0.99708,3.23,0.72,10.9 +7.1,0.685,0.35,2.0,0.088,9.0,92.0,0.9963,3.28,0.62,9.4 +9.9,0.25,0.46,1.7,0.062,26.0,42.0,0.9959,3.18,0.83,10.6 +6.4,0.64,0.21,1.8,0.081,14.0,31.0,0.99689,3.59,0.66,9.8 +6.4,0.64,0.21,1.8,0.081,14.0,31.0,0.99689,3.59,0.66,9.8 +7.4,0.68,0.16,1.8,0.078,12.0,39.0,0.9977,3.5,0.7,9.9 +6.4,0.64,0.21,1.8,0.081,14.0,31.0,0.99689,3.59,0.66,9.8 +6.4,0.63,0.21,1.6,0.08,12.0,32.0,0.99689,3.58,0.66,9.8 +9.3,0.43,0.44,1.9,0.085,9.0,22.0,0.99708,3.28,0.55,9.5 +9.3,0.43,0.44,1.9,0.085,9.0,22.0,0.99708,3.28,0.55,9.5 +8.0,0.42,0.32,2.5,0.08,26.0,122.0,0.99801,3.22,1.07,9.7 +9.3,0.36,0.39,1.5,0.08,41.0,55.0,0.99652,3.47,0.73,10.9 +9.3,0.36,0.39,1.5,0.08,41.0,55.0,0.99652,3.47,0.73,10.9 +7.6,0.735,0.02,2.5,0.071,10.0,14.0,0.99538,3.51,0.71,11.7 +9.3,0.36,0.39,1.5,0.08,41.0,55.0,0.99652,3.47,0.73,10.9 +8.2,0.26,0.34,2.5,0.073,16.0,47.0,0.99594,3.4,0.78,11.3 +11.7,0.28,0.47,1.7,0.054,17.0,32.0,0.99686,3.15,0.67,10.6 +6.8,0.56,0.22,1.8,0.074,15.0,24.0,0.99438,3.4,0.82,11.2 +7.2,0.62,0.06,2.7,0.077,15.0,85.0,0.99746,3.51,0.54,9.5 +5.8,1.01,0.66,2.0,0.039,15.0,88.0,0.99357,3.66,0.6,11.5 +7.5,0.42,0.32,2.7,0.067,7.0,25.0,0.99628,3.24,0.44,10.4 +7.2,0.62,0.06,2.5,0.078,17.0,84.0,0.99746,3.51,0.53,9.7 +7.2,0.62,0.06,2.7,0.077,15.0,85.0,0.99746,3.51,0.54,9.5 +7.2,0.635,0.07,2.6,0.077,16.0,86.0,0.99748,3.51,0.54,9.7 +6.8,0.49,0.22,2.3,0.071,13.0,24.0,0.99438,3.41,0.83,11.3 +6.9,0.51,0.23,2.0,0.072,13.0,22.0,0.99438,3.4,0.84,11.2 +6.8,0.56,0.22,1.8,0.074,15.0,24.0,0.99438,3.4,0.82,11.2 +7.6,0.63,0.03,2.0,0.08,27.0,43.0,0.99578,3.44,0.64,10.9 +7.7,0.715,0.01,2.1,0.064,31.0,43.0,0.99371,3.41,0.57,11.8 +6.9,0.56,0.03,1.5,0.086,36.0,46.0,0.99522,3.53,0.57,10.6 +7.3,0.35,0.24,2.0,0.067,28.0,48.0,0.99576,3.43,0.54,10.0 +9.1,0.21,0.37,1.6,0.067,6.0,10.0,0.99552,3.23,0.58,11.1 +10.4,0.38,0.46,2.1,0.104,6.0,10.0,0.99664,3.12,0.65,11.8 +8.8,0.31,0.4,2.8,0.109,7.0,16.0,0.99614,3.31,0.79,11.8 +7.1,0.47,0.0,2.2,0.067,7.0,14.0,0.99517,3.4,0.58,10.9 +7.7,0.715,0.01,2.1,0.064,31.0,43.0,0.99371,3.41,0.57,11.8 +8.8,0.61,0.19,4.0,0.094,30.0,69.0,0.99787,3.22,0.5,10.0 +7.2,0.6,0.04,2.5,0.076,18.0,88.0,0.99745,3.53,0.55,9.5 +9.2,0.56,0.18,1.6,0.078,10.0,21.0,0.99576,3.15,0.49,9.9 +7.6,0.715,0.0,2.1,0.068,30.0,35.0,0.99533,3.48,0.65,11.4 +8.4,0.31,0.29,3.1,0.194,14.0,26.0,0.99536,3.22,0.78,12.0 +7.2,0.6,0.04,2.5,0.076,18.0,88.0,0.99745,3.53,0.55,9.5 +8.8,0.61,0.19,4.0,0.094,30.0,69.0,0.99787,3.22,0.5,10.0 +8.9,0.75,0.14,2.5,0.086,9.0,30.0,0.99824,3.34,0.64,10.5 +9.0,0.8,0.12,2.4,0.083,8.0,28.0,0.99836,3.33,0.65,10.4 +10.7,0.52,0.38,2.6,0.066,29.0,56.0,0.99577,3.15,0.79,12.1 +6.8,0.57,0.0,2.5,0.072,32.0,64.0,0.99491,3.43,0.56,11.2 +10.7,0.9,0.34,6.6,0.112,23.0,99.0,1.00289,3.22,0.68,9.3 +7.2,0.34,0.24,2.0,0.071,30.0,52.0,0.99576,3.44,0.58,10.1 
+7.2,0.66,0.03,2.3,0.078,16.0,86.0,0.99743,3.53,0.57,9.7 +10.1,0.45,0.23,1.9,0.082,10.0,18.0,0.99774,3.22,0.65,9.3 +7.2,0.66,0.03,2.3,0.078,16.0,86.0,0.99743,3.53,0.57,9.7 +7.2,0.63,0.03,2.2,0.08,17.0,88.0,0.99745,3.53,0.58,9.8 +7.1,0.59,0.01,2.3,0.08,27.0,43.0,0.9955,3.42,0.58,10.7 +8.3,0.31,0.39,2.4,0.078,17.0,43.0,0.99444,3.31,0.77,12.5 +7.1,0.59,0.01,2.3,0.08,27.0,43.0,0.9955,3.42,0.58,10.7 +8.3,0.31,0.39,2.4,0.078,17.0,43.0,0.99444,3.31,0.77,12.5 +8.3,1.02,0.02,3.4,0.084,6.0,11.0,0.99892,3.48,0.49,11.0 +8.9,0.31,0.36,2.6,0.056,10.0,39.0,0.99562,3.4,0.69,11.8 +7.4,0.635,0.1,2.4,0.08,16.0,33.0,0.99736,3.58,0.69,10.8 +7.4,0.635,0.1,2.4,0.08,16.0,33.0,0.99736,3.58,0.69,10.8 +6.8,0.59,0.06,6.0,0.06,11.0,18.0,0.9962,3.41,0.59,10.8 +6.8,0.59,0.06,6.0,0.06,11.0,18.0,0.9962,3.41,0.59,10.8 +9.2,0.58,0.2,3.0,0.081,15.0,115.0,0.998,3.23,0.59,9.5 +7.2,0.54,0.27,2.6,0.084,12.0,78.0,0.9964,3.39,0.71,11.0 +6.1,0.56,0.0,2.2,0.079,6.0,9.0,0.9948,3.59,0.54,11.5 +7.4,0.52,0.13,2.4,0.078,34.0,61.0,0.99528,3.43,0.59,10.8 +7.3,0.305,0.39,1.2,0.059,7.0,11.0,0.99331,3.29,0.52,11.5 +9.3,0.38,0.48,3.8,0.132,3.0,11.0,0.99577,3.23,0.57,13.2 +9.1,0.28,0.46,9.0,0.114,3.0,9.0,0.99901,3.18,0.6,10.9 +10.0,0.46,0.44,2.9,0.065,4.0,8.0,0.99674,3.33,0.62,12.2 +9.4,0.395,0.46,4.6,0.094,3.0,10.0,0.99639,3.27,0.64,12.2 +7.3,0.305,0.39,1.2,0.059,7.0,11.0,0.99331,3.29,0.52,11.5 +8.6,0.315,0.4,2.2,0.079,3.0,6.0,0.99512,3.27,0.67,11.9 +5.3,0.715,0.19,1.5,0.161,7.0,62.0,0.99395,3.62,0.61,11.0 +6.8,0.41,0.31,8.8,0.084,26.0,45.0,0.99824,3.38,0.64,10.1 +8.4,0.36,0.32,2.2,0.081,32.0,79.0,0.9964,3.3,0.72,11.0 +8.4,0.62,0.12,1.8,0.072,38.0,46.0,0.99504,3.38,0.89,11.8 +9.6,0.41,0.37,2.3,0.091,10.0,23.0,0.99786,3.24,0.56,10.5 +8.4,0.36,0.32,2.2,0.081,32.0,79.0,0.9964,3.3,0.72,11.0 +8.4,0.62,0.12,1.8,0.072,38.0,46.0,0.99504,3.38,0.89,11.8 +6.8,0.41,0.31,8.8,0.084,26.0,45.0,0.99824,3.38,0.64,10.1 +8.6,0.47,0.27,2.3,0.055,14.0,28.0,0.99516,3.18,0.8,11.2 +8.6,0.22,0.36,1.9,0.064,53.0,77.0,0.99604,3.47,0.87,11.0 +9.4,0.24,0.33,2.3,0.061,52.0,73.0,0.99786,3.47,0.9,10.2 +8.4,0.67,0.19,2.2,0.093,11.0,75.0,0.99736,3.2,0.59,9.2 +8.6,0.47,0.27,2.3,0.055,14.0,28.0,0.99516,3.18,0.8,11.2 +8.7,0.33,0.38,3.3,0.063,10.0,19.0,0.99468,3.3,0.73,12.0 +6.6,0.61,0.01,1.9,0.08,8.0,25.0,0.99746,3.69,0.73,10.5 +7.4,0.61,0.01,2.0,0.074,13.0,38.0,0.99748,3.48,0.65,9.8 +7.6,0.4,0.29,1.9,0.078,29.0,66.0,0.9971,3.45,0.59,9.5 +7.4,0.61,0.01,2.0,0.074,13.0,38.0,0.99748,3.48,0.65,9.8 +6.6,0.61,0.01,1.9,0.08,8.0,25.0,0.99746,3.69,0.73,10.5 +8.8,0.3,0.38,2.3,0.06,19.0,72.0,0.99543,3.39,0.72,11.8 +8.8,0.3,0.38,2.3,0.06,19.0,72.0,0.99543,3.39,0.72,11.8 +12.0,0.63,0.5,1.4,0.071,6.0,26.0,0.99791,3.07,0.6,10.4 +7.2,0.38,0.38,2.8,0.068,23.0,42.0,0.99356,3.34,0.72,12.9 +6.2,0.46,0.17,1.6,0.073,7.0,11.0,0.99425,3.61,0.54,11.4 +9.6,0.33,0.52,2.2,0.074,13.0,25.0,0.99509,3.36,0.76,12.4 +9.9,0.27,0.49,5.0,0.082,9.0,17.0,0.99484,3.19,0.52,12.5 +10.1,0.43,0.4,2.6,0.092,13.0,52.0,0.99834,3.22,0.64,10.0 +9.8,0.5,0.34,2.3,0.094,10.0,45.0,0.99864,3.24,0.6,9.7 +8.3,0.3,0.49,3.8,0.09,11.0,24.0,0.99498,3.27,0.64,12.1 +10.2,0.44,0.42,2.0,0.071,7.0,20.0,0.99566,3.14,0.79,11.1 +10.2,0.44,0.58,4.1,0.092,11.0,24.0,0.99745,3.29,0.99,12.0 +8.3,0.28,0.48,2.1,0.093,6.0,12.0,0.99408,3.26,0.62,12.4 +8.9,0.12,0.45,1.8,0.075,10.0,21.0,0.99552,3.41,0.76,11.9 +8.9,0.12,0.45,1.8,0.075,10.0,21.0,0.99552,3.41,0.76,11.9 +8.9,0.12,0.45,1.8,0.075,10.0,21.0,0.99552,3.41,0.76,11.9 +8.3,0.28,0.48,2.1,0.093,6.0,12.0,0.99408,3.26,0.62,12.4 +8.2,0.31,0.4,2.2,0.058,6.0,10.0,0.99536,3.31,0.68,11.2 
+10.2,0.34,0.48,2.1,0.052,5.0,9.0,0.99458,3.2,0.69,12.1 +7.6,0.43,0.4,2.7,0.082,6.0,11.0,0.99538,3.44,0.54,12.2 +8.5,0.21,0.52,1.9,0.09,9.0,23.0,0.99648,3.36,0.67,10.4 +9.0,0.36,0.52,2.1,0.111,5.0,10.0,0.99568,3.31,0.62,11.3 +9.5,0.37,0.52,2.0,0.088,12.0,51.0,0.99613,3.29,0.58,11.1 +6.4,0.57,0.12,2.3,0.12,25.0,36.0,0.99519,3.47,0.71,11.3 +8.0,0.59,0.05,2.0,0.089,12.0,32.0,0.99735,3.36,0.61,10.0 +8.5,0.47,0.27,1.9,0.058,18.0,38.0,0.99518,3.16,0.85,11.1 +7.1,0.56,0.14,1.6,0.078,7.0,18.0,0.99592,3.27,0.62,9.3 +6.6,0.57,0.02,2.1,0.115,6.0,16.0,0.99654,3.38,0.69,9.5 +8.8,0.27,0.39,2.0,0.1,20.0,27.0,0.99546,3.15,0.69,11.2 +8.5,0.47,0.27,1.9,0.058,18.0,38.0,0.99518,3.16,0.85,11.1 +8.3,0.34,0.4,2.4,0.065,24.0,48.0,0.99554,3.34,0.86,11.0 +9.0,0.38,0.41,2.4,0.103,6.0,10.0,0.99604,3.13,0.58,11.9 +8.5,0.66,0.2,2.1,0.097,23.0,113.0,0.99733,3.13,0.48,9.2 +9.0,0.4,0.43,2.4,0.068,29.0,46.0,0.9943,3.2,0.6,12.2 +6.7,0.56,0.09,2.9,0.079,7.0,22.0,0.99669,3.46,0.61,10.2 +10.4,0.26,0.48,1.9,0.066,6.0,10.0,0.99724,3.33,0.87,10.9 +10.4,0.26,0.48,1.9,0.066,6.0,10.0,0.99724,3.33,0.87,10.9 +10.1,0.38,0.5,2.4,0.104,6.0,13.0,0.99643,3.22,0.65,11.6 +8.5,0.34,0.44,1.7,0.079,6.0,12.0,0.99605,3.52,0.63,10.7 +8.8,0.33,0.41,5.9,0.073,7.0,13.0,0.99658,3.3,0.62,12.1 +7.2,0.41,0.3,2.1,0.083,35.0,72.0,0.997,3.44,0.52,9.4 +7.2,0.41,0.3,2.1,0.083,35.0,72.0,0.997,3.44,0.52,9.4 +8.4,0.59,0.29,2.6,0.109,31.0,119.0,0.99801,3.15,0.5,9.1 +7.0,0.4,0.32,3.6,0.061,9.0,29.0,0.99416,3.28,0.49,11.3 +12.2,0.45,0.49,1.4,0.075,3.0,6.0,0.9969,3.13,0.63,10.4 +9.1,0.5,0.3,1.9,0.065,8.0,17.0,0.99774,3.32,0.71,10.5 +9.5,0.86,0.26,1.9,0.079,13.0,28.0,0.99712,3.25,0.62,10.0 +7.3,0.52,0.32,2.1,0.07,51.0,70.0,0.99418,3.34,0.82,12.9 +9.1,0.5,0.3,1.9,0.065,8.0,17.0,0.99774,3.32,0.71,10.5 +12.2,0.45,0.49,1.4,0.075,3.0,6.0,0.9969,3.13,0.63,10.4 +7.4,0.58,0.0,2.0,0.064,7.0,11.0,0.99562,3.45,0.58,11.3 +9.8,0.34,0.39,1.4,0.066,3.0,7.0,0.9947,3.19,0.55,11.4 +7.1,0.36,0.3,1.6,0.08,35.0,70.0,0.99693,3.44,0.5,9.4 +7.7,0.39,0.12,1.7,0.097,19.0,27.0,0.99596,3.16,0.49,9.4 +9.7,0.295,0.4,1.5,0.073,14.0,21.0,0.99556,3.14,0.51,10.9 +7.7,0.39,0.12,1.7,0.097,19.0,27.0,0.99596,3.16,0.49,9.4 +7.1,0.34,0.28,2.0,0.082,31.0,68.0,0.99694,3.45,0.48,9.4 +6.5,0.4,0.1,2.0,0.076,30.0,47.0,0.99554,3.36,0.48,9.4 +7.1,0.34,0.28,2.0,0.082,31.0,68.0,0.99694,3.45,0.48,9.4 +10.0,0.35,0.45,2.5,0.092,20.0,88.0,0.99918,3.15,0.43,9.4 +7.7,0.6,0.06,2.0,0.079,19.0,41.0,0.99697,3.39,0.62,10.1 +5.6,0.66,0.0,2.2,0.087,3.0,11.0,0.99378,3.71,0.63,12.8 +5.6,0.66,0.0,2.2,0.087,3.0,11.0,0.99378,3.71,0.63,12.8 +8.9,0.84,0.34,1.4,0.05,4.0,10.0,0.99554,3.12,0.48,9.1 +6.4,0.69,0.0,1.65,0.055,7.0,12.0,0.99162,3.47,0.53,12.9 +7.5,0.43,0.3,2.2,0.062,6.0,12.0,0.99495,3.44,0.72,11.5 +9.9,0.35,0.38,1.5,0.058,31.0,47.0,0.99676,3.26,0.82,10.6 +9.1,0.29,0.33,2.05,0.063,13.0,27.0,0.99516,3.26,0.84,11.7 +6.8,0.36,0.32,1.8,0.067,4.0,8.0,0.9928,3.36,0.55,12.8 +8.2,0.43,0.29,1.6,0.081,27.0,45.0,0.99603,3.25,0.54,10.3 +6.8,0.36,0.32,1.8,0.067,4.0,8.0,0.9928,3.36,0.55,12.8 +9.1,0.29,0.33,2.05,0.063,13.0,27.0,0.99516,3.26,0.84,11.7 +9.1,0.3,0.34,2.0,0.064,12.0,25.0,0.99516,3.26,0.84,11.7 +8.9,0.35,0.4,3.6,0.11,12.0,24.0,0.99549,3.23,0.7,12.0 +9.6,0.5,0.36,2.8,0.116,26.0,55.0,0.99722,3.18,0.68,10.9 +8.9,0.28,0.45,1.7,0.067,7.0,12.0,0.99354,3.25,0.55,12.3 +8.9,0.32,0.31,2.0,0.088,12.0,19.0,0.9957,3.17,0.55,10.4 +7.7,1.005,0.15,2.1,0.102,11.0,32.0,0.99604,3.23,0.48,10.0 +7.5,0.71,0.0,1.6,0.092,22.0,31.0,0.99635,3.38,0.58,10.0 +8.0,0.58,0.16,2.0,0.12,3.0,7.0,0.99454,3.22,0.58,11.2 
+10.5,0.39,0.46,2.2,0.075,14.0,27.0,0.99598,3.06,0.84,11.4 +8.9,0.38,0.4,2.2,0.068,12.0,28.0,0.99486,3.27,0.75,12.6 +8.0,0.18,0.37,0.9,0.049,36.0,109.0,0.99007,2.89,0.44,12.7 +8.0,0.18,0.37,0.9,0.049,36.0,109.0,0.99007,2.89,0.44,12.7 +7.0,0.5,0.14,1.8,0.078,10.0,23.0,0.99636,3.53,0.61,10.4 +11.3,0.36,0.66,2.4,0.123,3.0,8.0,0.99642,3.2,0.53,11.9 +11.3,0.36,0.66,2.4,0.123,3.0,8.0,0.99642,3.2,0.53,11.9 +7.0,0.51,0.09,2.1,0.062,4.0,9.0,0.99584,3.35,0.54,10.5 +8.2,0.32,0.42,2.3,0.098,3.0,9.0,0.99506,3.27,0.55,12.3 +7.7,0.58,0.01,1.8,0.088,12.0,18.0,0.99568,3.32,0.56,10.5 +8.6,0.83,0.0,2.8,0.095,17.0,43.0,0.99822,3.33,0.6,10.4 +7.9,0.31,0.32,1.9,0.066,14.0,36.0,0.99364,3.41,0.56,12.6 +6.4,0.795,0.0,2.2,0.065,28.0,52.0,0.99378,3.49,0.52,11.6 +7.2,0.34,0.21,2.5,0.075,41.0,68.0,0.99586,3.37,0.54,10.1 +7.7,0.58,0.01,1.8,0.088,12.0,18.0,0.99568,3.32,0.56,10.5 +7.1,0.59,0.0,2.1,0.091,9.0,14.0,0.99488,3.42,0.55,11.5 +7.3,0.55,0.01,1.8,0.093,9.0,15.0,0.99514,3.35,0.58,11.0 +8.1,0.82,0.0,4.1,0.095,5.0,14.0,0.99854,3.36,0.53,9.6 +7.5,0.57,0.08,2.6,0.089,14.0,27.0,0.99592,3.3,0.59,10.4 +8.9,0.745,0.18,2.5,0.077,15.0,48.0,0.99739,3.2,0.47,9.7 +10.1,0.37,0.34,2.4,0.085,5.0,17.0,0.99683,3.17,0.65,10.6 +7.6,0.31,0.34,2.5,0.082,26.0,35.0,0.99356,3.22,0.59,12.5 +7.3,0.91,0.1,1.8,0.074,20.0,56.0,0.99672,3.35,0.56,9.2 +8.7,0.41,0.41,6.2,0.078,25.0,42.0,0.9953,3.24,0.77,12.6 +8.9,0.5,0.21,2.2,0.088,21.0,39.0,0.99692,3.33,0.83,11.1 +7.4,0.965,0.0,2.2,0.088,16.0,32.0,0.99756,3.58,0.67,10.2 +6.9,0.49,0.19,1.7,0.079,13.0,26.0,0.99547,3.38,0.64,9.8 +8.9,0.5,0.21,2.2,0.088,21.0,39.0,0.99692,3.33,0.83,11.1 +9.5,0.39,0.41,8.9,0.069,18.0,39.0,0.99859,3.29,0.81,10.9 +6.4,0.39,0.33,3.3,0.046,12.0,53.0,0.99294,3.36,0.62,12.2 +6.9,0.44,0.0,1.4,0.07,32.0,38.0,0.99438,3.32,0.58,11.4 +7.6,0.78,0.0,1.7,0.076,33.0,45.0,0.99612,3.31,0.62,10.7 +7.1,0.43,0.17,1.8,0.082,27.0,51.0,0.99634,3.49,0.64,10.4 +9.3,0.49,0.36,1.7,0.081,3.0,14.0,0.99702,3.27,0.78,10.9 +9.3,0.5,0.36,1.8,0.084,6.0,17.0,0.99704,3.27,0.77,10.8 +7.1,0.43,0.17,1.8,0.082,27.0,51.0,0.99634,3.49,0.64,10.4 +8.5,0.46,0.59,1.4,0.414,16.0,45.0,0.99702,3.03,1.34,9.2 +5.6,0.605,0.05,2.4,0.073,19.0,25.0,0.99258,3.56,0.55,12.9 +8.3,0.33,0.42,2.3,0.07,9.0,20.0,0.99426,3.38,0.77,12.7 +8.2,0.64,0.27,2.0,0.095,5.0,77.0,0.99747,3.13,0.62,9.1 +8.2,0.64,0.27,2.0,0.095,5.0,77.0,0.99747,3.13,0.62,9.1 +8.9,0.48,0.53,4.0,0.101,3.0,10.0,0.99586,3.21,0.59,12.1 +7.6,0.42,0.25,3.9,0.104,28.0,90.0,0.99784,3.15,0.57,9.1 +9.9,0.53,0.57,2.4,0.093,30.0,52.0,0.9971,3.19,0.76,11.6 +8.9,0.48,0.53,4.0,0.101,3.0,10.0,0.99586,3.21,0.59,12.1 +11.6,0.23,0.57,1.8,0.074,3.0,8.0,0.9981,3.14,0.7,9.9 +9.1,0.4,0.5,1.8,0.071,7.0,16.0,0.99462,3.21,0.69,12.5 +8.0,0.38,0.44,1.9,0.098,6.0,15.0,0.9956,3.3,0.64,11.4 +10.2,0.29,0.65,2.4,0.075,6.0,17.0,0.99565,3.22,0.63,11.8 +8.2,0.74,0.09,2.0,0.067,5.0,10.0,0.99418,3.28,0.57,11.8 +7.7,0.61,0.18,2.4,0.083,6.0,20.0,0.9963,3.29,0.6,10.2 +6.6,0.52,0.08,2.4,0.07,13.0,26.0,0.99358,3.4,0.72,12.5 +11.1,0.31,0.53,2.2,0.06,3.0,10.0,0.99572,3.02,0.83,10.9 +11.1,0.31,0.53,2.2,0.06,3.0,10.0,0.99572,3.02,0.83,10.9 +8.0,0.62,0.35,2.8,0.086,28.0,52.0,0.997,3.31,0.62,10.8 +9.3,0.33,0.45,1.5,0.057,19.0,37.0,0.99498,3.18,0.89,11.1 +7.5,0.77,0.2,8.1,0.098,30.0,92.0,0.99892,3.2,0.58,9.2 +7.2,0.35,0.26,1.8,0.083,33.0,75.0,0.9968,3.4,0.58,9.5 +8.0,0.62,0.33,2.7,0.088,16.0,37.0,0.9972,3.31,0.58,10.7 +7.5,0.77,0.2,8.1,0.098,30.0,92.0,0.99892,3.2,0.58,9.2 +9.1,0.25,0.34,2.0,0.071,45.0,67.0,0.99769,3.44,0.86,10.2 +9.9,0.32,0.56,2.0,0.073,3.0,8.0,0.99534,3.15,0.73,11.4 
+8.6,0.37,0.65,6.4,0.08,3.0,8.0,0.99817,3.27,0.58,11.0 +8.6,0.37,0.65,6.4,0.08,3.0,8.0,0.99817,3.27,0.58,11.0 +7.9,0.3,0.68,8.3,0.05,37.5,278.0,0.99316,3.01,0.51,12.3 +10.3,0.27,0.56,1.4,0.047,3.0,8.0,0.99471,3.16,0.51,11.8 +7.9,0.3,0.68,8.3,0.05,37.5,289.0,0.99316,3.01,0.51,12.3 +7.2,0.38,0.3,1.8,0.073,31.0,70.0,0.99685,3.42,0.59,9.5 +8.7,0.42,0.45,2.4,0.072,32.0,59.0,0.99617,3.33,0.77,12.0 +7.2,0.38,0.3,1.8,0.073,31.0,70.0,0.99685,3.42,0.59,9.5 +6.8,0.48,0.08,1.8,0.074,40.0,64.0,0.99529,3.12,0.49,9.6 +8.5,0.34,0.4,4.7,0.055,3.0,9.0,0.99738,3.38,0.66,11.6 +7.9,0.19,0.42,1.6,0.057,18.0,30.0,0.994,3.29,0.69,11.2 +11.6,0.41,0.54,1.5,0.095,22.0,41.0,0.99735,3.02,0.76,9.9 +11.6,0.41,0.54,1.5,0.095,22.0,41.0,0.99735,3.02,0.76,9.9 +10.0,0.26,0.54,1.9,0.083,42.0,74.0,0.99451,2.98,0.63,11.8 +7.9,0.34,0.42,2.0,0.086,8.0,19.0,0.99546,3.35,0.6,11.4 +7.0,0.54,0.09,2.0,0.081,10.0,16.0,0.99479,3.43,0.59,11.5 +9.2,0.31,0.36,2.2,0.079,11.0,31.0,0.99615,3.33,0.86,12.0 +6.6,0.725,0.09,5.5,0.117,9.0,17.0,0.99655,3.35,0.49,10.8 +9.4,0.4,0.47,2.5,0.087,6.0,20.0,0.99772,3.15,0.5,10.5 +6.6,0.725,0.09,5.5,0.117,9.0,17.0,0.99655,3.35,0.49,10.8 +8.6,0.52,0.38,1.5,0.096,5.0,18.0,0.99666,3.2,0.52,9.4 +8.0,0.31,0.45,2.1,0.216,5.0,16.0,0.99358,3.15,0.81,12.5 +8.6,0.52,0.38,1.5,0.096,5.0,18.0,0.99666,3.2,0.52,9.4 +8.4,0.34,0.42,2.1,0.072,23.0,36.0,0.99392,3.11,0.78,12.4 +7.4,0.49,0.27,2.1,0.071,14.0,25.0,0.99388,3.35,0.63,12.0 +6.1,0.48,0.09,1.7,0.078,18.0,30.0,0.99402,3.45,0.54,11.2 +7.4,0.49,0.27,2.1,0.071,14.0,25.0,0.99388,3.35,0.63,12.0 +8.0,0.48,0.34,2.2,0.073,16.0,25.0,0.9936,3.28,0.66,12.4 +6.3,0.57,0.28,2.1,0.048,13.0,49.0,0.99374,3.41,0.6,12.8 +8.2,0.23,0.42,1.9,0.069,9.0,17.0,0.99376,3.21,0.54,12.3 +9.1,0.3,0.41,2.0,0.068,10.0,24.0,0.99523,3.27,0.85,11.7 +8.1,0.78,0.1,3.3,0.09,4.0,13.0,0.99855,3.36,0.49,9.5 +10.8,0.47,0.43,2.1,0.171,27.0,66.0,0.9982,3.17,0.76,10.8 +8.3,0.53,0.0,1.4,0.07,6.0,14.0,0.99593,3.25,0.64,10.0 +5.4,0.42,0.27,2.0,0.092,23.0,55.0,0.99471,3.78,0.64,12.3 +7.9,0.33,0.41,1.5,0.056,6.0,35.0,0.99396,3.29,0.71,11.0 +8.9,0.24,0.39,1.6,0.074,3.0,10.0,0.99698,3.12,0.59,9.5 +5.0,0.4,0.5,4.3,0.046,29.0,80.0,0.9902,3.49,0.66,13.6 +7.0,0.69,0.07,2.5,0.091,15.0,21.0,0.99572,3.38,0.6,11.3 +7.0,0.69,0.07,2.5,0.091,15.0,21.0,0.99572,3.38,0.6,11.3 +7.0,0.69,0.07,2.5,0.091,15.0,21.0,0.99572,3.38,0.6,11.3 +7.1,0.39,0.12,2.1,0.065,14.0,24.0,0.99252,3.3,0.53,13.3 +5.6,0.66,0.0,2.5,0.066,7.0,15.0,0.99256,3.52,0.58,12.9 +7.9,0.54,0.34,2.5,0.076,8.0,17.0,0.99235,3.2,0.72,13.1 +6.6,0.5,0.0,1.8,0.062,21.0,28.0,0.99352,3.44,0.55,12.3 +6.3,0.47,0.0,1.4,0.055,27.0,33.0,0.9922,3.45,0.48,12.3 +10.7,0.4,0.37,1.9,0.081,17.0,29.0,0.99674,3.12,0.65,11.2 +6.5,0.58,0.0,2.2,0.096,3.0,13.0,0.99557,3.62,0.62,11.5 +8.8,0.24,0.35,1.7,0.055,13.0,27.0,0.99394,3.14,0.59,11.3 +5.8,0.29,0.26,1.7,0.063,3.0,11.0,0.9915,3.39,0.54,13.5 +6.3,0.76,0.0,2.9,0.072,26.0,52.0,0.99379,3.51,0.6,11.5 +10.0,0.43,0.33,2.7,0.095,28.0,89.0,0.9984,3.22,0.68,10.0 +10.5,0.43,0.35,3.3,0.092,24.0,70.0,0.99798,3.21,0.69,10.5 +9.1,0.6,0.0,1.9,0.058,5.0,10.0,0.9977,3.18,0.63,10.4 +5.9,0.19,0.21,1.7,0.045,57.0,135.0,0.99341,3.32,0.44,9.5 +7.4,0.36,0.34,1.8,0.075,18.0,38.0,0.9933,3.38,0.88,13.6 +7.2,0.48,0.07,5.5,0.089,10.0,18.0,0.99684,3.37,0.68,11.2 +8.5,0.28,0.35,1.7,0.061,6.0,15.0,0.99524,3.3,0.74,11.8 +8.0,0.25,0.43,1.7,0.067,22.0,50.0,0.9946,3.38,0.6,11.9 +10.4,0.52,0.45,2.0,0.08,6.0,13.0,0.99774,3.22,0.76,11.4 +10.4,0.52,0.45,2.0,0.08,6.0,13.0,0.99774,3.22,0.76,11.4 +7.5,0.41,0.15,3.7,0.104,29.0,94.0,0.99786,3.14,0.58,9.1 
+8.2,0.51,0.24,2.0,0.079,16.0,86.0,0.99764,3.34,0.64,9.5 +7.3,0.4,0.3,1.7,0.08,33.0,79.0,0.9969,3.41,0.65,9.5 +8.2,0.38,0.32,2.5,0.08,24.0,71.0,0.99624,3.27,0.85,11.0 +6.9,0.45,0.11,2.4,0.043,6.0,12.0,0.99354,3.3,0.65,11.4 +7.0,0.22,0.3,1.8,0.065,16.0,20.0,0.99672,3.61,0.82,10.0 +7.3,0.32,0.23,2.3,0.066,35.0,70.0,0.99588,3.43,0.62,10.1 +8.2,0.2,0.43,2.5,0.076,31.0,51.0,0.99672,3.53,0.81,10.4 +7.8,0.5,0.12,1.8,0.178,6.0,21.0,0.996,3.28,0.87,9.8 +10.0,0.41,0.45,6.2,0.071,6.0,14.0,0.99702,3.21,0.49,11.8 +7.8,0.39,0.42,2.0,0.086,9.0,21.0,0.99526,3.39,0.66,11.6 +10.0,0.35,0.47,2.0,0.061,6.0,11.0,0.99585,3.23,0.52,12.0 +8.2,0.33,0.32,2.8,0.067,4.0,12.0,0.99473,3.3,0.76,12.8 +6.1,0.58,0.23,2.5,0.044,16.0,70.0,0.99352,3.46,0.65,12.5 +8.3,0.6,0.25,2.2,0.118,9.0,38.0,0.99616,3.15,0.53,9.8 +9.6,0.42,0.35,2.1,0.083,17.0,38.0,0.99622,3.23,0.66,11.1 +6.6,0.58,0.0,2.2,0.1,50.0,63.0,0.99544,3.59,0.68,11.4 +8.3,0.6,0.25,2.2,0.118,9.0,38.0,0.99616,3.15,0.53,9.8 +8.5,0.18,0.51,1.75,0.071,45.0,88.0,0.99524,3.33,0.76,11.8 +5.1,0.51,0.18,2.1,0.042,16.0,101.0,0.9924,3.46,0.87,12.9 +6.7,0.41,0.43,2.8,0.076,22.0,54.0,0.99572,3.42,1.16,10.6 +10.2,0.41,0.43,2.2,0.11,11.0,37.0,0.99728,3.16,0.67,10.8 +10.6,0.36,0.57,2.3,0.087,6.0,20.0,0.99676,3.14,0.72,11.1 +8.8,0.45,0.43,1.4,0.076,12.0,21.0,0.99551,3.21,0.75,10.2 +8.5,0.32,0.42,2.3,0.075,12.0,19.0,0.99434,3.14,0.71,11.8 +9.0,0.785,0.24,1.7,0.078,10.0,21.0,0.99692,3.29,0.67,10.0 +9.0,0.785,0.24,1.7,0.078,10.0,21.0,0.99692,3.29,0.67,10.0 +8.5,0.44,0.5,1.9,0.369,15.0,38.0,0.99634,3.01,1.1,9.4 +9.9,0.54,0.26,2.0,0.111,7.0,60.0,0.99709,2.94,0.98,10.2 +8.2,0.33,0.39,2.5,0.074,29.0,48.0,0.99528,3.32,0.88,12.4 +6.5,0.34,0.27,2.8,0.067,8.0,44.0,0.99384,3.21,0.56,12.0 +7.6,0.5,0.29,2.3,0.086,5.0,14.0,0.99502,3.32,0.62,11.5 +9.2,0.36,0.34,1.6,0.062,5.0,12.0,0.99667,3.2,0.67,10.5 +7.1,0.59,0.0,2.2,0.078,26.0,44.0,0.99522,3.42,0.68,10.8 +9.7,0.42,0.46,2.1,0.074,5.0,16.0,0.99649,3.27,0.74,12.3 +7.6,0.36,0.31,1.7,0.079,26.0,65.0,0.99716,3.46,0.62,9.5 +7.6,0.36,0.31,1.7,0.079,26.0,65.0,0.99716,3.46,0.62,9.5 +6.5,0.61,0.0,2.2,0.095,48.0,59.0,0.99541,3.61,0.7,11.5 +6.5,0.88,0.03,5.6,0.079,23.0,47.0,0.99572,3.58,0.5,11.2 +7.1,0.66,0.0,2.4,0.052,6.0,11.0,0.99318,3.35,0.66,12.7 +5.6,0.915,0.0,2.1,0.041,17.0,78.0,0.99346,3.68,0.73,11.4 +8.2,0.35,0.33,2.4,0.076,11.0,47.0,0.99599,3.27,0.81,11.0 +8.2,0.35,0.33,2.4,0.076,11.0,47.0,0.99599,3.27,0.81,11.0 +9.8,0.39,0.43,1.65,0.068,5.0,11.0,0.99478,3.19,0.46,11.4 +10.2,0.4,0.4,2.5,0.068,41.0,54.0,0.99754,3.38,0.86,10.5 +6.8,0.66,0.07,1.6,0.07,16.0,61.0,0.99572,3.29,0.6,9.3 +6.7,0.64,0.23,2.1,0.08,11.0,119.0,0.99538,3.36,0.7,10.9 +7.0,0.43,0.3,2.0,0.085,6.0,39.0,0.99346,3.33,0.46,11.9 +6.6,0.8,0.03,7.8,0.079,6.0,12.0,0.9963,3.52,0.5,12.2 +7.0,0.43,0.3,2.0,0.085,6.0,39.0,0.99346,3.33,0.46,11.9 +6.7,0.64,0.23,2.1,0.08,11.0,119.0,0.99538,3.36,0.7,10.9 +8.8,0.955,0.05,1.8,0.075,5.0,19.0,0.99616,3.3,0.44,9.6 +9.1,0.4,0.57,4.6,0.08,6.0,20.0,0.99652,3.28,0.57,12.5 +6.5,0.885,0.0,2.3,0.166,6.0,12.0,0.99551,3.56,0.51,10.8 +7.2,0.25,0.37,2.5,0.063,11.0,41.0,0.99439,3.52,0.8,12.4 +6.4,0.885,0.0,2.3,0.166,6.0,12.0,0.99551,3.56,0.51,10.8 +7.0,0.745,0.12,1.8,0.114,15.0,64.0,0.99588,3.22,0.59,9.5 +6.2,0.43,0.22,1.8,0.078,21.0,56.0,0.99633,3.52,0.6,9.5 +7.9,0.58,0.23,2.3,0.076,23.0,94.0,0.99686,3.21,0.58,9.5 +7.7,0.57,0.21,1.5,0.069,4.0,9.0,0.99458,3.16,0.54,9.8 +7.7,0.26,0.26,2.0,0.052,19.0,77.0,0.9951,3.15,0.79,10.9 +7.9,0.58,0.23,2.3,0.076,23.0,94.0,0.99686,3.21,0.58,9.5 +7.7,0.57,0.21,1.5,0.069,4.0,9.0,0.99458,3.16,0.54,9.8 
+7.9,0.34,0.36,1.9,0.065,5.0,10.0,0.99419,3.27,0.54,11.2 +8.6,0.42,0.39,1.8,0.068,6.0,12.0,0.99516,3.35,0.69,11.7 +9.9,0.74,0.19,5.8,0.111,33.0,76.0,0.99878,3.14,0.55,9.4 +7.2,0.36,0.46,2.1,0.074,24.0,44.0,0.99534,3.4,0.85,11.0 +7.2,0.36,0.46,2.1,0.074,24.0,44.0,0.99534,3.4,0.85,11.0 +7.2,0.36,0.46,2.1,0.074,24.0,44.0,0.99534,3.4,0.85,11.0 +9.9,0.72,0.55,1.7,0.136,24.0,52.0,0.99752,3.35,0.94,10.0 +7.2,0.36,0.46,2.1,0.074,24.0,44.0,0.99534,3.4,0.85,11.0 +6.2,0.39,0.43,2.0,0.071,14.0,24.0,0.99428,3.45,0.87,11.2 +6.8,0.65,0.02,2.1,0.078,8.0,15.0,0.99498,3.35,0.62,10.4 +6.6,0.44,0.15,2.1,0.076,22.0,53.0,0.9957,3.32,0.62,9.3 +6.8,0.65,0.02,2.1,0.078,8.0,15.0,0.99498,3.35,0.62,10.4 +9.6,0.38,0.42,1.9,0.071,5.0,13.0,0.99659,3.15,0.75,10.5 +10.2,0.33,0.46,1.9,0.081,6.0,9.0,0.99628,3.1,0.48,10.4 +8.8,0.27,0.46,2.1,0.095,20.0,29.0,0.99488,3.26,0.56,11.3 +7.9,0.57,0.31,2.0,0.079,10.0,79.0,0.99677,3.29,0.69,9.5 +8.2,0.34,0.37,1.9,0.057,43.0,74.0,0.99408,3.23,0.81,12.0 +8.2,0.4,0.31,1.9,0.082,8.0,24.0,0.996,3.24,0.69,10.6 +9.0,0.39,0.4,1.3,0.044,25.0,50.0,0.99478,3.2,0.83,10.9 +10.9,0.32,0.52,1.8,0.132,17.0,44.0,0.99734,3.28,0.77,11.5 +10.9,0.32,0.52,1.8,0.132,17.0,44.0,0.99734,3.28,0.77,11.5 +8.1,0.53,0.22,2.2,0.078,33.0,89.0,0.99678,3.26,0.46,9.6 +10.5,0.36,0.47,2.2,0.074,9.0,23.0,0.99638,3.23,0.76,12.0 +12.6,0.39,0.49,2.5,0.08,8.0,20.0,0.9992,3.07,0.82,10.3 +9.2,0.46,0.23,2.6,0.091,18.0,77.0,0.99922,3.15,0.51,9.4 +7.5,0.58,0.03,4.1,0.08,27.0,46.0,0.99592,3.02,0.47,9.2 +9.0,0.58,0.25,2.0,0.104,8.0,21.0,0.99769,3.27,0.72,9.6 +5.1,0.42,0.0,1.8,0.044,18.0,88.0,0.99157,3.68,0.73,13.6 +7.6,0.43,0.29,2.1,0.075,19.0,66.0,0.99718,3.4,0.64,9.5 +7.7,0.18,0.34,2.7,0.066,15.0,58.0,0.9947,3.37,0.78,11.8 +7.8,0.815,0.01,2.6,0.074,48.0,90.0,0.99621,3.38,0.62,10.8 +7.6,0.43,0.29,2.1,0.075,19.0,66.0,0.99718,3.4,0.64,9.5 +10.2,0.23,0.37,2.2,0.057,14.0,36.0,0.99614,3.23,0.49,9.3 +7.1,0.75,0.01,2.2,0.059,11.0,18.0,0.99242,3.39,0.4,12.8 +6.0,0.33,0.32,12.9,0.054,6.0,113.0,0.99572,3.3,0.56,11.5 +7.8,0.55,0.0,1.7,0.07,7.0,17.0,0.99659,3.26,0.64,9.4 +7.1,0.75,0.01,2.2,0.059,11.0,18.0,0.99242,3.39,0.4,12.8 +8.1,0.73,0.0,2.5,0.081,12.0,24.0,0.99798,3.38,0.46,9.6 +6.5,0.67,0.0,4.3,0.057,11.0,20.0,0.99488,3.45,0.56,11.8 +7.5,0.61,0.2,1.7,0.076,36.0,60.0,0.99494,3.1,0.4,9.3 +9.8,0.37,0.39,2.5,0.079,28.0,65.0,0.99729,3.16,0.59,9.8 +9.0,0.4,0.41,2.0,0.058,15.0,40.0,0.99414,3.22,0.6,12.2 +8.3,0.56,0.22,2.4,0.082,10.0,86.0,0.9983,3.37,0.62,9.5 +5.9,0.29,0.25,13.4,0.067,72.0,160.0,0.99721,3.33,0.54,10.3 +7.4,0.55,0.19,1.8,0.082,15.0,34.0,0.99655,3.49,0.68,10.5 +7.4,0.74,0.07,1.7,0.086,15.0,48.0,0.99502,3.12,0.48,10.0 +7.4,0.55,0.19,1.8,0.082,15.0,34.0,0.99655,3.49,0.68,10.5 +6.9,0.41,0.33,2.2,0.081,22.0,36.0,0.9949,3.41,0.75,11.1 +7.1,0.6,0.01,2.3,0.079,24.0,37.0,0.99514,3.4,0.61,10.9 +7.1,0.6,0.01,2.3,0.079,24.0,37.0,0.99514,3.4,0.61,10.9 +7.5,0.58,0.14,2.2,0.077,27.0,60.0,0.9963,3.28,0.59,9.8 +7.1,0.72,0.0,1.8,0.123,6.0,14.0,0.99627,3.45,0.58,9.8 +7.9,0.66,0.0,1.4,0.096,6.0,13.0,0.99569,3.43,0.58,9.5 +7.8,0.7,0.06,1.9,0.079,20.0,35.0,0.99628,3.4,0.69,10.9 +6.1,0.64,0.02,2.4,0.069,26.0,46.0,0.99358,3.47,0.45,11.0 +7.5,0.59,0.22,1.8,0.082,43.0,60.0,0.99499,3.1,0.42,9.2 +7.0,0.58,0.28,4.8,0.085,12.0,69.0,0.99633,3.32,0.7,11.0 +6.8,0.64,0.0,2.7,0.123,15.0,33.0,0.99538,3.44,0.63,11.3 +6.8,0.64,0.0,2.7,0.123,15.0,33.0,0.99538,3.44,0.63,11.3 +8.6,0.635,0.68,1.8,0.403,19.0,56.0,0.99632,3.02,1.15,9.3 +6.3,1.02,0.0,2.0,0.083,17.0,24.0,0.99437,3.59,0.55,11.2 +9.8,0.45,0.38,2.5,0.081,34.0,66.0,0.99726,3.15,0.58,9.8 
+8.2,0.78,0.0,2.2,0.089,13.0,26.0,0.9978,3.37,0.46,9.6 +8.5,0.37,0.32,1.8,0.066,26.0,51.0,0.99456,3.38,0.72,11.8 +7.2,0.57,0.05,2.3,0.081,16.0,36.0,0.99564,3.38,0.6,10.3 +7.2,0.57,0.05,2.3,0.081,16.0,36.0,0.99564,3.38,0.6,10.3 +10.4,0.43,0.5,2.3,0.068,13.0,19.0,0.996,3.1,0.87,11.4 +6.9,0.41,0.31,2.0,0.079,21.0,51.0,0.99668,3.47,0.55,9.5 +5.5,0.49,0.03,1.8,0.044,28.0,87.0,0.9908,3.5,0.82,14.0 +5.0,0.38,0.01,1.6,0.048,26.0,60.0,0.99084,3.7,0.75,14.0 +7.3,0.44,0.2,1.6,0.049,24.0,64.0,0.9935,3.38,0.57,11.7 +5.9,0.46,0.0,1.9,0.077,25.0,44.0,0.99385,3.5,0.53,11.2 +7.5,0.58,0.2,2.0,0.073,34.0,44.0,0.99494,3.1,0.43,9.3 +7.8,0.58,0.13,2.1,0.102,17.0,36.0,0.9944,3.24,0.53,11.2 +8.0,0.715,0.22,2.3,0.075,13.0,81.0,0.99688,3.24,0.54,9.5 +8.5,0.4,0.4,6.3,0.05,3.0,10.0,0.99566,3.28,0.56,12.0 +7.0,0.69,0.0,1.9,0.114,3.0,10.0,0.99636,3.35,0.6,9.7 +8.0,0.715,0.22,2.3,0.075,13.0,81.0,0.99688,3.24,0.54,9.5 +9.8,0.3,0.39,1.7,0.062,3.0,9.0,0.9948,3.14,0.57,11.5 +7.1,0.46,0.2,1.9,0.077,28.0,54.0,0.9956,3.37,0.64,10.4 +7.1,0.46,0.2,1.9,0.077,28.0,54.0,0.9956,3.37,0.64,10.4 +7.9,0.765,0.0,2.0,0.084,9.0,22.0,0.99619,3.33,0.68,10.9 +8.7,0.63,0.28,2.7,0.096,17.0,69.0,0.99734,3.26,0.63,10.2 +7.0,0.42,0.19,2.3,0.071,18.0,36.0,0.99476,3.39,0.56,10.9 +11.3,0.37,0.5,1.8,0.09,20.0,47.0,0.99734,3.15,0.57,10.5 +7.1,0.16,0.44,2.5,0.068,17.0,31.0,0.99328,3.35,0.54,12.4 +8.0,0.6,0.08,2.6,0.056,3.0,7.0,0.99286,3.22,0.37,13.0 +7.0,0.6,0.3,4.5,0.068,20.0,110.0,0.99914,3.3,1.17,10.2 +7.0,0.6,0.3,4.5,0.068,20.0,110.0,0.99914,3.3,1.17,10.2 +7.6,0.74,0.0,1.9,0.1,6.0,12.0,0.99521,3.36,0.59,11.0 +8.2,0.635,0.1,2.1,0.073,25.0,60.0,0.99638,3.29,0.75,10.9 +5.9,0.395,0.13,2.4,0.056,14.0,28.0,0.99362,3.62,0.67,12.4 +7.5,0.755,0.0,1.9,0.084,6.0,12.0,0.99672,3.34,0.49,9.7 +8.2,0.635,0.1,2.1,0.073,25.0,60.0,0.99638,3.29,0.75,10.9 +6.6,0.63,0.0,4.3,0.093,51.0,77.5,0.99558,3.2,0.45,9.5 +6.6,0.63,0.0,4.3,0.093,51.0,77.5,0.99558,3.2,0.45,9.5 +7.2,0.53,0.14,2.1,0.064,15.0,29.0,0.99323,3.35,0.61,12.1 +5.7,0.6,0.0,1.4,0.063,11.0,18.0,0.99191,3.45,0.56,12.2 +7.6,1.58,0.0,2.1,0.137,5.0,9.0,0.99476,3.5,0.4,10.9 +5.2,0.645,0.0,2.15,0.08,15.0,28.0,0.99444,3.78,0.61,12.5 +6.7,0.86,0.07,2.0,0.1,20.0,57.0,0.99598,3.6,0.74,11.7 +9.1,0.37,0.32,2.1,0.064,4.0,15.0,0.99576,3.3,0.8,11.2 +8.0,0.28,0.44,1.8,0.081,28.0,68.0,0.99501,3.36,0.66,11.2 +7.6,0.79,0.21,2.3,0.087,21.0,68.0,0.9955,3.12,0.44,9.2 +7.5,0.61,0.26,1.9,0.073,24.0,88.0,0.99612,3.3,0.53,9.8 +9.7,0.69,0.32,2.5,0.088,22.0,91.0,0.9979,3.29,0.62,10.1 +6.8,0.68,0.09,3.9,0.068,15.0,29.0,0.99524,3.41,0.52,11.1 +9.7,0.69,0.32,2.5,0.088,22.0,91.0,0.9979,3.29,0.62,10.1 +7.0,0.62,0.1,1.4,0.071,27.0,63.0,0.996,3.28,0.61,9.2 +7.5,0.61,0.26,1.9,0.073,24.0,88.0,0.99612,3.3,0.53,9.8 +6.5,0.51,0.15,3.0,0.064,12.0,27.0,0.9929,3.33,0.59,12.8 +8.0,1.18,0.21,1.9,0.083,14.0,41.0,0.99532,3.34,0.47,10.5 +7.0,0.36,0.21,2.3,0.086,20.0,65.0,0.99558,3.4,0.54,10.1 +7.0,0.36,0.21,2.4,0.086,24.0,69.0,0.99556,3.4,0.53,10.1 +7.5,0.63,0.27,2.0,0.083,17.0,91.0,0.99616,3.26,0.58,9.8 +5.4,0.74,0.0,1.2,0.041,16.0,46.0,0.99258,4.01,0.59,12.5 +9.9,0.44,0.46,2.2,0.091,10.0,41.0,0.99638,3.18,0.69,11.9 +7.5,0.63,0.27,2.0,0.083,17.0,91.0,0.99616,3.26,0.58,9.8 +9.1,0.76,0.68,1.7,0.414,18.0,64.0,0.99652,2.9,1.33,9.1 +9.7,0.66,0.34,2.6,0.094,12.0,88.0,0.99796,3.26,0.66,10.1 +5.0,0.74,0.0,1.2,0.041,16.0,46.0,0.99258,4.01,0.59,12.5 +9.1,0.34,0.42,1.8,0.058,9.0,18.0,0.99392,3.18,0.55,11.4 +9.1,0.36,0.39,1.8,0.06,21.0,55.0,0.99495,3.18,0.82,11.0 +6.7,0.46,0.24,1.7,0.077,18.0,34.0,0.9948,3.39,0.6,10.6 
+6.7,0.46,0.24,1.7,0.077,18.0,34.0,0.9948,3.39,0.6,10.6 +6.7,0.46,0.24,1.7,0.077,18.0,34.0,0.9948,3.39,0.6,10.6 +6.7,0.46,0.24,1.7,0.077,18.0,34.0,0.9948,3.39,0.6,10.6 +6.5,0.52,0.11,1.8,0.073,13.0,38.0,0.9955,3.34,0.52,9.3 +7.4,0.6,0.26,2.1,0.083,17.0,91.0,0.99616,3.29,0.56,9.8 +7.4,0.6,0.26,2.1,0.083,17.0,91.0,0.99616,3.29,0.56,9.8 +7.8,0.87,0.26,3.8,0.107,31.0,67.0,0.99668,3.26,0.46,9.2 +8.4,0.39,0.1,1.7,0.075,6.0,25.0,0.99581,3.09,0.43,9.7 +9.1,0.775,0.22,2.2,0.079,12.0,48.0,0.9976,3.18,0.51,9.6 +7.2,0.835,0.0,2.0,0.166,4.0,11.0,0.99608,3.39,0.52,10.0 +6.6,0.58,0.02,2.4,0.069,19.0,40.0,0.99387,3.38,0.66,12.6 +6.0,0.5,0.0,1.4,0.057,15.0,26.0,0.99448,3.36,0.45,9.5 +6.0,0.5,0.0,1.4,0.057,15.0,26.0,0.99448,3.36,0.45,9.5 +6.0,0.5,0.0,1.4,0.057,15.0,26.0,0.99448,3.36,0.45,9.5 +7.5,0.51,0.02,1.7,0.084,13.0,31.0,0.99538,3.36,0.54,10.5 +7.5,0.51,0.02,1.7,0.084,13.0,31.0,0.99538,3.36,0.54,10.5 +7.5,0.51,0.02,1.7,0.084,13.0,31.0,0.99538,3.36,0.54,10.5 +7.6,0.54,0.02,1.7,0.085,17.0,31.0,0.99589,3.37,0.51,10.4 +7.5,0.51,0.02,1.7,0.084,13.0,31.0,0.99538,3.36,0.54,10.5 +11.5,0.42,0.48,2.6,0.077,8.0,20.0,0.99852,3.09,0.53,11.0 +8.2,0.44,0.24,2.3,0.063,10.0,28.0,0.99613,3.25,0.53,10.2 +6.1,0.59,0.01,2.1,0.056,5.0,13.0,0.99472,3.52,0.56,11.4 +7.2,0.655,0.03,1.8,0.078,7.0,12.0,0.99587,3.34,0.39,9.5 +7.2,0.655,0.03,1.8,0.078,7.0,12.0,0.99587,3.34,0.39,9.5 +6.9,0.57,0.0,2.8,0.081,21.0,41.0,0.99518,3.41,0.52,10.8 +9.0,0.6,0.29,2.0,0.069,32.0,73.0,0.99654,3.34,0.57,10.0 +7.2,0.62,0.01,2.3,0.065,8.0,46.0,0.99332,3.32,0.51,11.8 +7.6,0.645,0.03,1.9,0.086,14.0,57.0,0.9969,3.37,0.46,10.3 +7.6,0.645,0.03,1.9,0.086,14.0,57.0,0.9969,3.37,0.46,10.3 +7.2,0.58,0.03,2.3,0.077,7.0,28.0,0.99568,3.35,0.52,10.0 +6.1,0.32,0.25,1.8,0.086,5.0,32.0,0.99464,3.36,0.44,10.1 +6.1,0.34,0.25,1.8,0.084,4.0,28.0,0.99464,3.36,0.44,10.1 +7.3,0.43,0.24,2.5,0.078,27.0,67.0,0.99648,3.6,0.59,11.1 +7.4,0.64,0.17,5.4,0.168,52.0,98.0,0.99736,3.28,0.5,9.5 +11.6,0.475,0.4,1.4,0.091,6.0,28.0,0.99704,3.07,0.65,10.03333333 +9.2,0.54,0.31,2.3,0.112,11.0,38.0,0.99699,3.24,0.56,10.9 +8.3,0.85,0.14,2.5,0.093,13.0,54.0,0.99724,3.36,0.54,10.1 +11.6,0.475,0.4,1.4,0.091,6.0,28.0,0.99704,3.07,0.65,10.03333333 +8.0,0.83,0.27,2.0,0.08,11.0,63.0,0.99652,3.29,0.48,9.8 +7.2,0.605,0.02,1.9,0.096,10.0,31.0,0.995,3.46,0.53,11.8 +7.8,0.5,0.09,2.2,0.115,10.0,42.0,0.9971,3.18,0.62,9.5 +7.3,0.74,0.08,1.7,0.094,10.0,45.0,0.99576,3.24,0.5,9.8 +6.9,0.54,0.3,2.2,0.088,9.0,105.0,0.99725,3.25,1.18,10.5 +8.0,0.77,0.32,2.1,0.079,16.0,74.0,0.99656,3.27,0.5,9.8 +6.6,0.61,0.0,1.6,0.069,4.0,8.0,0.99396,3.33,0.37,10.4 +8.7,0.78,0.51,1.7,0.415,12.0,66.0,0.99623,3.0,1.17,9.2 +7.5,0.58,0.56,3.1,0.153,5.0,14.0,0.99476,3.21,1.03,11.6 +8.7,0.78,0.51,1.7,0.415,12.0,66.0,0.99623,3.0,1.17,9.2 +7.7,0.75,0.27,3.8,0.11,34.0,89.0,0.99664,3.24,0.45,9.3 +6.8,0.815,0.0,1.2,0.267,16.0,29.0,0.99471,3.32,0.51,9.8 +7.2,0.56,0.26,2.0,0.083,13.0,100.0,0.99586,3.26,0.52,9.9 +8.2,0.885,0.2,1.4,0.086,7.0,31.0,0.9946,3.11,0.46,10.0 +5.2,0.49,0.26,2.3,0.09,23.0,74.0,0.9953,3.71,0.62,12.2 +7.2,0.45,0.15,2.0,0.078,10.0,28.0,0.99609,3.29,0.51,9.9 +7.5,0.57,0.02,2.6,0.077,11.0,35.0,0.99557,3.36,0.62,10.8 +7.5,0.57,0.02,2.6,0.077,11.0,35.0,0.99557,3.36,0.62,10.8 +6.8,0.83,0.09,1.8,0.074,4.0,25.0,0.99534,3.38,0.45,9.6 +8.0,0.6,0.22,2.1,0.08,25.0,105.0,0.99613,3.3,0.49,9.9 +8.0,0.6,0.22,2.1,0.08,25.0,105.0,0.99613,3.3,0.49,9.9 +7.1,0.755,0.15,1.8,0.107,20.0,84.0,0.99593,3.19,0.5,9.5 +8.0,0.81,0.25,3.4,0.076,34.0,85.0,0.99668,3.19,0.42,9.2 +7.4,0.64,0.07,1.8,0.1,8.0,23.0,0.9961,3.3,0.58,9.6 
+7.4,0.64,0.07,1.8,0.1,8.0,23.0,0.9961,3.3,0.58,9.6 +6.6,0.64,0.31,6.1,0.083,7.0,49.0,0.99718,3.35,0.68,10.3 +6.7,0.48,0.02,2.2,0.08,36.0,111.0,0.99524,3.1,0.53,9.7 +6.0,0.49,0.0,2.3,0.068,15.0,33.0,0.99292,3.58,0.59,12.5 +8.0,0.64,0.22,2.4,0.094,5.0,33.0,0.99612,3.37,0.58,11.0 +7.1,0.62,0.06,1.3,0.07,5.0,12.0,0.9942,3.17,0.48,9.8 +8.0,0.52,0.25,2.0,0.078,19.0,59.0,0.99612,3.3,0.48,10.2 +6.4,0.57,0.14,3.9,0.07,27.0,73.0,0.99669,3.32,0.48,9.2 +8.6,0.685,0.1,1.6,0.092,3.0,12.0,0.99745,3.31,0.65,9.55 +8.7,0.675,0.1,1.6,0.09,4.0,11.0,0.99745,3.31,0.65,9.55 +7.3,0.59,0.26,2.0,0.08,17.0,104.0,0.99584,3.28,0.52,9.9 +7.0,0.6,0.12,2.2,0.083,13.0,28.0,0.9966,3.52,0.62,10.2 +7.2,0.67,0.0,2.2,0.068,10.0,24.0,0.9956,3.42,0.72,11.1 +7.9,0.69,0.21,2.1,0.08,33.0,141.0,0.9962,3.25,0.51,9.9 +7.9,0.69,0.21,2.1,0.08,33.0,141.0,0.9962,3.25,0.51,9.9 +7.6,0.3,0.42,2.0,0.052,6.0,24.0,0.9963,3.44,0.82,11.9 +7.2,0.33,0.33,1.7,0.061,3.0,13.0,0.996,3.23,1.1,10.0 +8.0,0.5,0.39,2.6,0.082,12.0,46.0,0.9985,3.43,0.62,10.7 +7.7,0.28,0.3,2.0,0.062,18.0,34.0,0.9952,3.28,0.9,11.3 +8.2,0.24,0.34,5.1,0.062,8.0,22.0,0.9974,3.22,0.94,10.9 +6.0,0.51,0.0,2.1,0.064,40.0,54.0,0.995,3.54,0.93,10.7 +8.1,0.29,0.36,2.2,0.048,35.0,53.0,0.995,3.27,1.01,12.4 +6.0,0.51,0.0,2.1,0.064,40.0,54.0,0.995,3.54,0.93,10.7 +6.6,0.96,0.0,1.8,0.082,5.0,16.0,0.9936,3.5,0.44,11.9 +6.4,0.47,0.4,2.4,0.071,8.0,19.0,0.9963,3.56,0.73,10.6 +8.2,0.24,0.34,5.1,0.062,8.0,22.0,0.9974,3.22,0.94,10.9 +9.9,0.57,0.25,2.0,0.104,12.0,89.0,0.9963,3.04,0.9,10.1 +10.0,0.32,0.59,2.2,0.077,3.0,15.0,0.9994,3.2,0.78,9.6 +6.2,0.58,0.0,1.6,0.065,8.0,18.0,0.9966,3.56,0.84,9.4 +10.0,0.32,0.59,2.2,0.077,3.0,15.0,0.9994,3.2,0.78,9.6 +7.3,0.34,0.33,2.5,0.064,21.0,37.0,0.9952,3.35,0.77,12.1 +7.8,0.53,0.01,1.6,0.077,3.0,19.0,0.995,3.16,0.46,9.8 +7.7,0.64,0.21,2.2,0.077,32.0,133.0,0.9956,3.27,0.45,9.9 +7.8,0.53,0.01,1.6,0.077,3.0,19.0,0.995,3.16,0.46,9.8 +7.5,0.4,0.18,1.6,0.079,24.0,58.0,0.9965,3.34,0.58,9.4 +7.0,0.54,0.0,2.1,0.079,39.0,55.0,0.9956,3.39,0.84,11.4 +6.4,0.53,0.09,3.9,0.123,14.0,31.0,0.9968,3.5,0.67,11.0 +8.3,0.26,0.37,1.4,0.076,8.0,23.0,0.9974,3.26,0.7,9.6 +8.3,0.26,0.37,1.4,0.076,8.0,23.0,0.9974,3.26,0.7,9.6 +7.7,0.23,0.37,1.8,0.046,23.0,60.0,0.9971,3.41,0.71,12.1 +7.6,0.41,0.33,2.5,0.078,6.0,23.0,0.9957,3.3,0.58,11.2 +7.8,0.64,0.0,1.9,0.072,27.0,55.0,0.9962,3.31,0.63,11.0 +7.9,0.18,0.4,2.2,0.049,38.0,67.0,0.996,3.33,0.93,11.3 +7.4,0.41,0.24,1.8,0.066,18.0,47.0,0.9956,3.37,0.62,10.4 +7.6,0.43,0.31,2.1,0.069,13.0,74.0,0.9958,3.26,0.54,9.9 +5.9,0.44,0.0,1.6,0.042,3.0,11.0,0.9944,3.48,0.85,11.7 +6.1,0.4,0.16,1.8,0.069,11.0,25.0,0.9955,3.42,0.74,10.1 +10.2,0.54,0.37,15.4,0.214,55.0,95.0,1.00369,3.18,0.77,9.0 +10.2,0.54,0.37,15.4,0.214,55.0,95.0,1.00369,3.18,0.77,9.0 +10.0,0.38,0.38,1.6,0.169,27.0,90.0,0.99914,3.15,0.65,8.5 +6.8,0.915,0.29,4.8,0.07,15.0,39.0,0.99577,3.53,0.54,11.1 +7.0,0.59,0.0,1.7,0.052,3.0,8.0,0.996,3.41,0.47,10.3 +7.3,0.67,0.02,2.2,0.072,31.0,92.0,0.99566,3.32,0.68,11.06666667 +7.2,0.37,0.32,2.0,0.062,15.0,28.0,0.9947,3.23,0.73,11.3 +7.4,0.785,0.19,5.2,0.094,19.0,98.0,0.99713,3.16,0.52,9.566666667 +6.9,0.63,0.02,1.9,0.078,18.0,30.0,0.99712,3.4,0.75,9.8 +6.9,0.58,0.2,1.75,0.058,8.0,22.0,0.99322,3.38,0.49,11.7 +7.3,0.67,0.02,2.2,0.072,31.0,92.0,0.99566,3.32,0.68,11.1 +7.4,0.785,0.19,5.2,0.094,19.0,98.0,0.99713,3.16,0.52,9.6 +6.9,0.63,0.02,1.9,0.078,18.0,30.0,0.99712,3.4,0.75,9.8 +6.8,0.67,0.0,1.9,0.08,22.0,39.0,0.99701,3.4,0.74,9.7 +6.9,0.58,0.01,1.9,0.08,40.0,54.0,0.99683,3.4,0.73,9.7 +7.2,0.38,0.31,2.0,0.056,15.0,29.0,0.99472,3.23,0.76,11.3 
+7.2,0.37,0.32,2.0,0.062,15.0,28.0,0.9947,3.23,0.73,11.3 +7.8,0.32,0.44,2.7,0.104,8.0,17.0,0.99732,3.33,0.78,11.0 +6.6,0.58,0.02,2.0,0.062,37.0,53.0,0.99374,3.35,0.76,11.6 +7.6,0.49,0.33,1.9,0.074,27.0,85.0,0.99706,3.41,0.58,9.0 +11.7,0.45,0.63,2.2,0.073,7.0,23.0,0.99974,3.21,0.69,10.9 +6.5,0.9,0.0,1.6,0.052,9.0,17.0,0.99467,3.5,0.63,10.9 +6.0,0.54,0.06,1.8,0.05,38.0,89.0,0.99236,3.3,0.5,10.55 +7.6,0.49,0.33,1.9,0.074,27.0,85.0,0.99706,3.41,0.58,9.0 +8.4,0.29,0.4,1.7,0.067,8.0,20.0,0.99603,3.39,0.6,10.5 +7.9,0.2,0.35,1.7,0.054,7.0,15.0,0.99458,3.32,0.8,11.9 +6.4,0.42,0.09,2.3,0.054,34.0,64.0,0.99724,3.41,0.68,10.4 +6.2,0.785,0.0,2.1,0.06,6.0,13.0,0.99664,3.59,0.61,10.0 +6.8,0.64,0.03,2.3,0.075,14.0,31.0,0.99545,3.36,0.58,10.4 +6.9,0.63,0.01,2.4,0.076,14.0,39.0,0.99522,3.34,0.53,10.8 +6.8,0.59,0.1,1.7,0.063,34.0,53.0,0.9958,3.41,0.67,9.7 +6.8,0.59,0.1,1.7,0.063,34.0,53.0,0.9958,3.41,0.67,9.7 +7.3,0.48,0.32,2.1,0.062,31.0,54.0,0.99728,3.3,0.65,10.0 +6.7,1.04,0.08,2.3,0.067,19.0,32.0,0.99648,3.52,0.57,11.0 +7.3,0.48,0.32,2.1,0.062,31.0,54.0,0.99728,3.3,0.65,10.0 +7.3,0.98,0.05,2.1,0.061,20.0,49.0,0.99705,3.31,0.55,9.7 +10.0,0.69,0.11,1.4,0.084,8.0,24.0,0.99578,2.88,0.47,9.7 +6.7,0.7,0.08,3.75,0.067,8.0,16.0,0.99334,3.43,0.52,12.6 +7.6,0.35,0.6,2.6,0.073,23.0,44.0,0.99656,3.38,0.79,11.1 +6.1,0.6,0.08,1.8,0.071,14.0,45.0,0.99336,3.38,0.54,11.0 +9.9,0.5,0.5,13.8,0.205,48.0,82.0,1.00242,3.16,0.75,8.8 +5.3,0.47,0.11,2.2,0.048,16.0,89.0,0.99182,3.54,0.88,13.56666667 +9.9,0.5,0.5,13.8,0.205,48.0,82.0,1.00242,3.16,0.75,8.8 +5.3,0.47,0.11,2.2,0.048,16.0,89.0,0.99182,3.54,0.88,13.6 +7.1,0.875,0.05,5.7,0.082,3.0,14.0,0.99808,3.4,0.52,10.2 +8.2,0.28,0.6,3.0,0.104,10.0,22.0,0.99828,3.39,0.68,10.6 +5.6,0.62,0.03,1.5,0.08,6.0,13.0,0.99498,3.66,0.62,10.1 +8.2,0.28,0.6,3.0,0.104,10.0,22.0,0.99828,3.39,0.68,10.6 +7.2,0.58,0.54,2.1,0.114,3.0,9.0,0.99719,3.33,0.57,10.3 +8.1,0.33,0.44,1.5,0.042,6.0,12.0,0.99542,3.35,0.61,10.7 +6.8,0.91,0.06,2.0,0.06,4.0,11.0,0.99592,3.53,0.64,10.9 +7.0,0.655,0.16,2.1,0.074,8.0,25.0,0.99606,3.37,0.55,9.7 +6.8,0.68,0.21,2.1,0.07,9.0,23.0,0.99546,3.38,0.6,10.3 +6.0,0.64,0.05,1.9,0.066,9.0,17.0,0.99496,3.52,0.78,10.6 +5.6,0.54,0.04,1.7,0.049,5.0,13.0,0.9942,3.72,0.58,11.4 +6.2,0.57,0.1,2.1,0.048,4.0,11.0,0.99448,3.44,0.76,10.8 +7.1,0.22,0.49,1.8,0.039,8.0,18.0,0.99344,3.39,0.56,12.4 +5.6,0.54,0.04,1.7,0.049,5.0,13.0,0.9942,3.72,0.58,11.4 +6.2,0.65,0.06,1.6,0.05,6.0,18.0,0.99348,3.57,0.54,11.95 +7.7,0.54,0.26,1.9,0.089,23.0,147.0,0.99636,3.26,0.59,9.7 +6.4,0.31,0.09,1.4,0.066,15.0,28.0,0.99459,3.42,0.7,10.0 +7.0,0.43,0.02,1.9,0.08,15.0,28.0,0.99492,3.35,0.81,10.6 +7.7,0.54,0.26,1.9,0.089,23.0,147.0,0.99636,3.26,0.59,9.7 +6.9,0.74,0.03,2.3,0.054,7.0,16.0,0.99508,3.45,0.63,11.5 +6.6,0.895,0.04,2.3,0.068,7.0,13.0,0.99582,3.53,0.58,10.8 +6.9,0.74,0.03,2.3,0.054,7.0,16.0,0.99508,3.45,0.63,11.5 +7.5,0.725,0.04,1.5,0.076,8.0,15.0,0.99508,3.26,0.53,9.6 +7.8,0.82,0.29,4.3,0.083,21.0,64.0,0.99642,3.16,0.53,9.4 +7.3,0.585,0.18,2.4,0.078,15.0,60.0,0.99638,3.31,0.54,9.8 +6.2,0.44,0.39,2.5,0.077,6.0,14.0,0.99555,3.51,0.69,11.0 +7.5,0.38,0.57,2.3,0.106,5.0,12.0,0.99605,3.36,0.55,11.4 +6.7,0.76,0.02,1.8,0.078,6.0,12.0,0.996,3.55,0.63,9.95 +6.8,0.81,0.05,2.0,0.07,6.0,14.0,0.99562,3.51,0.66,10.8 +7.5,0.38,0.57,2.3,0.106,5.0,12.0,0.99605,3.36,0.55,11.4 +7.1,0.27,0.6,2.1,0.074,17.0,25.0,0.99814,3.38,0.72,10.6 +7.9,0.18,0.4,1.8,0.062,7.0,20.0,0.9941,3.28,0.7,11.1 +6.4,0.36,0.21,2.2,0.047,26.0,48.0,0.99661,3.47,0.77,9.7 +7.1,0.69,0.04,2.1,0.068,19.0,27.0,0.99712,3.44,0.67,9.8 
+6.4,0.79,0.04,2.2,0.061,11.0,17.0,0.99588,3.53,0.65,10.4 +6.4,0.56,0.15,1.8,0.078,17.0,65.0,0.99294,3.33,0.6,10.5 +6.9,0.84,0.21,4.1,0.074,16.0,65.0,0.99842,3.53,0.72,9.233333333 +6.9,0.84,0.21,4.1,0.074,16.0,65.0,0.99842,3.53,0.72,9.25 +6.1,0.32,0.25,2.3,0.071,23.0,58.0,0.99633,3.42,0.97,10.6 +6.5,0.53,0.06,2.0,0.063,29.0,44.0,0.99489,3.38,0.83,10.3 +7.4,0.47,0.46,2.2,0.114,7.0,20.0,0.99647,3.32,0.63,10.5 +6.6,0.7,0.08,2.6,0.106,14.0,27.0,0.99665,3.44,0.58,10.2 +6.5,0.53,0.06,2.0,0.063,29.0,44.0,0.99489,3.38,0.83,10.3 +6.9,0.48,0.2,1.9,0.082,9.0,23.0,0.99585,3.39,0.43,9.05 +6.1,0.32,0.25,2.3,0.071,23.0,58.0,0.99633,3.42,0.97,10.6 +6.8,0.48,0.25,2.0,0.076,29.0,61.0,0.9953,3.34,0.6,10.4 +6.0,0.42,0.19,2.0,0.075,22.0,47.0,0.99522,3.39,0.78,10.0 +6.7,0.48,0.08,2.1,0.064,18.0,34.0,0.99552,3.33,0.64,9.7 +6.8,0.47,0.08,2.2,0.064,18.0,38.0,0.99553,3.3,0.65,9.6 +7.1,0.53,0.07,1.7,0.071,15.0,24.0,0.9951,3.29,0.66,10.8 +7.9,0.29,0.49,2.2,0.096,21.0,59.0,0.99714,3.31,0.67,10.1 +7.1,0.69,0.08,2.1,0.063,42.0,52.0,0.99608,3.42,0.6,10.2 +6.6,0.44,0.09,2.2,0.063,9.0,18.0,0.99444,3.42,0.69,11.3 +6.1,0.705,0.1,2.8,0.081,13.0,28.0,0.99631,3.6,0.66,10.2 +7.2,0.53,0.13,2.0,0.058,18.0,22.0,0.99573,3.21,0.68,9.9 +8.0,0.39,0.3,1.9,0.074,32.0,84.0,0.99717,3.39,0.61,9.0 +6.6,0.56,0.14,2.4,0.064,13.0,29.0,0.99397,3.42,0.62,11.7 +7.0,0.55,0.13,2.2,0.075,15.0,35.0,0.9959,3.36,0.59,9.7 +6.1,0.53,0.08,1.9,0.077,24.0,45.0,0.99528,3.6,0.68,10.3 +5.4,0.58,0.08,1.9,0.059,20.0,31.0,0.99484,3.5,0.64,10.2 +6.2,0.64,0.09,2.5,0.081,15.0,26.0,0.99538,3.57,0.63,12.0 +7.2,0.39,0.32,1.8,0.065,34.0,60.0,0.99714,3.46,0.78,9.9 +6.2,0.52,0.08,4.4,0.071,11.0,32.0,0.99646,3.56,0.63,11.6 +7.4,0.25,0.29,2.2,0.054,19.0,49.0,0.99666,3.4,0.76,10.9 +6.7,0.855,0.02,1.9,0.064,29.0,38.0,0.99472,3.3,0.56,10.75 +11.1,0.44,0.42,2.2,0.064,14.0,19.0,0.99758,3.25,0.57,10.4 +8.4,0.37,0.43,2.3,0.063,12.0,19.0,0.9955,3.17,0.81,11.2 +6.5,0.63,0.33,1.8,0.059,16.0,28.0,0.99531,3.36,0.64,10.1 +7.0,0.57,0.02,2.0,0.072,17.0,26.0,0.99575,3.36,0.61,10.2 +6.3,0.6,0.1,1.6,0.048,12.0,26.0,0.99306,3.55,0.51,12.1 +11.2,0.4,0.5,2.0,0.099,19.0,50.0,0.99783,3.1,0.58,10.4 +7.4,0.36,0.3,1.8,0.074,17.0,24.0,0.99419,3.24,0.7,11.4 +7.1,0.68,0.0,2.3,0.087,17.0,26.0,0.99783,3.45,0.53,9.5 +7.1,0.67,0.0,2.3,0.083,18.0,27.0,0.99768,3.44,0.54,9.4 +6.3,0.68,0.01,3.7,0.103,32.0,54.0,0.99586,3.51,0.66,11.3 +7.3,0.735,0.0,2.2,0.08,18.0,28.0,0.99765,3.41,0.6,9.4 +6.6,0.855,0.02,2.4,0.062,15.0,23.0,0.99627,3.54,0.6,11.0 +7.0,0.56,0.17,1.7,0.065,15.0,24.0,0.99514,3.44,0.68,10.55 +6.6,0.88,0.04,2.2,0.066,12.0,20.0,0.99636,3.53,0.56,9.9 +6.6,0.855,0.02,2.4,0.062,15.0,23.0,0.99627,3.54,0.6,11.0 +6.9,0.63,0.33,6.7,0.235,66.0,115.0,0.99787,3.22,0.56,9.5 +7.8,0.6,0.26,2.0,0.08,31.0,131.0,0.99622,3.21,0.52,9.9 +7.8,0.6,0.26,2.0,0.08,31.0,131.0,0.99622,3.21,0.52,9.9 +7.8,0.6,0.26,2.0,0.08,31.0,131.0,0.99622,3.21,0.52,9.9 +7.2,0.695,0.13,2.0,0.076,12.0,20.0,0.99546,3.29,0.54,10.1 +7.2,0.695,0.13,2.0,0.076,12.0,20.0,0.99546,3.29,0.54,10.1 +7.2,0.695,0.13,2.0,0.076,12.0,20.0,0.99546,3.29,0.54,10.1 +6.7,0.67,0.02,1.9,0.061,26.0,42.0,0.99489,3.39,0.82,10.9 +6.7,0.16,0.64,2.1,0.059,24.0,52.0,0.99494,3.34,0.71,11.2 +7.2,0.695,0.13,2.0,0.076,12.0,20.0,0.99546,3.29,0.54,10.1 +7.0,0.56,0.13,1.6,0.077,25.0,42.0,0.99629,3.34,0.59,9.2 +6.2,0.51,0.14,1.9,0.056,15.0,34.0,0.99396,3.48,0.57,11.5 +6.4,0.36,0.53,2.2,0.23,19.0,35.0,0.9934,3.37,0.93,12.4 +6.4,0.38,0.14,2.2,0.038,15.0,25.0,0.99514,3.44,0.65,11.1 +7.3,0.69,0.32,2.2,0.069,35.0,104.0,0.99632,3.33,0.51,9.5 
+6.0,0.58,0.2,2.4,0.075,15.0,50.0,0.99467,3.58,0.67,12.5 +5.6,0.31,0.78,13.9,0.074,23.0,92.0,0.99677,3.39,0.48,10.5 +7.5,0.52,0.4,2.2,0.06,12.0,20.0,0.99474,3.26,0.64,11.8 +8.0,0.3,0.63,1.6,0.081,16.0,29.0,0.99588,3.3,0.78,10.8 +6.2,0.7,0.15,5.1,0.076,13.0,27.0,0.99622,3.54,0.6,11.9 +6.8,0.67,0.15,1.8,0.118,13.0,20.0,0.9954,3.42,0.67,11.3 +6.2,0.56,0.09,1.7,0.053,24.0,32.0,0.99402,3.54,0.6,11.3 +7.4,0.35,0.33,2.4,0.068,9.0,26.0,0.9947,3.36,0.6,11.9 +6.2,0.56,0.09,1.7,0.053,24.0,32.0,0.99402,3.54,0.6,11.3 +6.1,0.715,0.1,2.6,0.053,13.0,27.0,0.99362,3.57,0.5,11.9 +6.2,0.46,0.29,2.1,0.074,32.0,98.0,0.99578,3.33,0.62,9.8 +6.7,0.32,0.44,2.4,0.061,24.0,34.0,0.99484,3.29,0.8,11.6 +7.2,0.39,0.44,2.6,0.066,22.0,48.0,0.99494,3.3,0.84,11.5 +7.5,0.31,0.41,2.4,0.065,34.0,60.0,0.99492,3.34,0.85,11.4 +5.8,0.61,0.11,1.8,0.066,18.0,28.0,0.99483,3.55,0.66,10.9 +7.2,0.66,0.33,2.5,0.068,34.0,102.0,0.99414,3.27,0.78,12.8 +6.6,0.725,0.2,7.8,0.073,29.0,79.0,0.9977,3.29,0.54,9.2 +6.3,0.55,0.15,1.8,0.077,26.0,35.0,0.99314,3.32,0.82,11.6 +5.4,0.74,0.09,1.7,0.089,16.0,26.0,0.99402,3.67,0.56,11.6 +6.3,0.51,0.13,2.3,0.076,29.0,40.0,0.99574,3.42,0.75,11.0 +6.8,0.62,0.08,1.9,0.068,28.0,38.0,0.99651,3.42,0.82,9.5 +6.2,0.6,0.08,2.0,0.09,32.0,44.0,0.9949,3.45,0.58,10.5 +5.9,0.55,0.1,2.2,0.062,39.0,51.0,0.99512,3.52,0.76,11.2 +6.3,0.51,0.13,2.3,0.076,29.0,40.0,0.99574,3.42,0.75,11.0 +5.9,0.645,0.12,2.0,0.075,32.0,44.0,0.99547,3.57,0.71,10.2 +6.0,0.31,0.47,3.6,0.067,18.0,42.0,0.99549,3.39,0.66,11.0 +7.0,0.27,0.36,20.7,0.045,45.0,170.0,1.001,3.0,0.45,8.8 +6.3,0.3,0.34,1.6,0.049,14.0,132.0,0.994,3.3,0.49,9.5 +8.1,0.28,0.4,6.9,0.05,30.0,97.0,0.9951,3.26,0.44,10.1 +7.2,0.23,0.32,8.5,0.058,47.0,186.0,0.9956,3.19,0.4,9.9 +7.2,0.23,0.32,8.5,0.058,47.0,186.0,0.9956,3.19,0.4,9.9 +8.1,0.28,0.4,6.9,0.05,30.0,97.0,0.9951,3.26,0.44,10.1 +6.2,0.32,0.16,7.0,0.045,30.0,136.0,0.9949,3.18,0.47,9.6 +7.0,0.27,0.36,20.7,0.045,45.0,170.0,1.001,3.0,0.45,8.8 +6.3,0.3,0.34,1.6,0.049,14.0,132.0,0.994,3.3,0.49,9.5 +8.1,0.22,0.43,1.5,0.044,28.0,129.0,0.9938,3.22,0.45,11.0 +8.1,0.27,0.41,1.45,0.033,11.0,63.0,0.9908,2.99,0.56,12.0 +8.6,0.23,0.4,4.2,0.035,17.0,109.0,0.9947,3.14,0.53,9.7 +7.9,0.18,0.37,1.2,0.04,16.0,75.0,0.992,3.18,0.63,10.8 +6.6,0.16,0.4,1.5,0.044,48.0,143.0,0.9912,3.54,0.52,12.4 +8.3,0.42,0.62,19.25,0.04,41.0,172.0,1.0002,2.98,0.67,9.7 +6.6,0.17,0.38,1.5,0.032,28.0,112.0,0.9914,3.25,0.55,11.4 +6.3,0.48,0.04,1.1,0.046,30.0,99.0,0.9928,3.24,0.36,9.6 +6.2,0.66,0.48,1.2,0.029,29.0,75.0,0.9892,3.33,0.39,12.8 +7.4,0.34,0.42,1.1,0.033,17.0,171.0,0.9917,3.12,0.53,11.3 +6.5,0.31,0.14,7.5,0.044,34.0,133.0,0.9955,3.22,0.5,9.5 +6.2,0.66,0.48,1.2,0.029,29.0,75.0,0.9892,3.33,0.39,12.8 +6.4,0.31,0.38,2.9,0.038,19.0,102.0,0.9912,3.17,0.35,11.0 +6.8,0.26,0.42,1.7,0.049,41.0,122.0,0.993,3.47,0.48,10.5 +7.6,0.67,0.14,1.5,0.074,25.0,168.0,0.9937,3.05,0.51,9.3 +6.6,0.27,0.41,1.3,0.052,16.0,142.0,0.9951,3.42,0.47,10.0 +7.0,0.25,0.32,9.0,0.046,56.0,245.0,0.9955,3.25,0.5,10.4 +6.9,0.24,0.35,1.0,0.052,35.0,146.0,0.993,3.45,0.44,10.0 +7.0,0.28,0.39,8.7,0.051,32.0,141.0,0.9961,3.38,0.53,10.5 +7.4,0.27,0.48,1.1,0.047,17.0,132.0,0.9914,3.19,0.49,11.6 +7.2,0.32,0.36,2.0,0.033,37.0,114.0,0.9906,3.1,0.71,12.3 +8.5,0.24,0.39,10.4,0.044,20.0,142.0,0.9974,3.2,0.53,10.0 +8.3,0.14,0.34,1.1,0.042,7.0,47.0,0.9934,3.47,0.4,10.2 +7.4,0.25,0.36,2.05,0.05,31.0,100.0,0.992,3.19,0.44,10.8 +6.2,0.12,0.34,1.5,0.045,43.0,117.0,0.9939,3.42,0.51,9.0 +5.8,0.27,0.2,14.95,0.044,22.0,179.0,0.9962,3.37,0.37,10.2 +7.3,0.28,0.43,1.7,0.08,21.0,123.0,0.9905,3.19,0.42,12.8 
+6.5,0.39,0.23,5.4,0.051,25.0,149.0,0.9934,3.24,0.35,10.0 +7.0,0.33,0.32,1.2,0.053,38.0,138.0,0.9906,3.13,0.28,11.2 +7.3,0.24,0.39,17.95,0.057,45.0,149.0,0.9999,3.21,0.36,8.6 +7.3,0.24,0.39,17.95,0.057,45.0,149.0,0.9999,3.21,0.36,8.6 +6.7,0.23,0.39,2.5,0.172,63.0,158.0,0.9937,3.11,0.36,9.4 +6.7,0.24,0.39,2.9,0.173,63.0,157.0,0.9937,3.1,0.34,9.4 +7.0,0.31,0.26,7.4,0.069,28.0,160.0,0.9954,3.13,0.46,9.8 +6.6,0.24,0.27,1.4,0.057,33.0,152.0,0.9934,3.22,0.56,9.5 +6.7,0.23,0.26,1.4,0.06,33.0,154.0,0.9934,3.24,0.56,9.5 +7.4,0.18,0.31,1.4,0.058,38.0,167.0,0.9931,3.16,0.53,10.0 +6.2,0.45,0.26,4.4,0.063,63.0,206.0,0.994,3.27,0.52,9.8 +6.2,0.46,0.25,4.4,0.066,62.0,207.0,0.9939,3.25,0.52,9.8 +7.0,0.31,0.26,7.4,0.069,28.0,160.0,0.9954,3.13,0.46,9.8 +6.9,0.19,0.35,5.0,0.067,32.0,150.0,0.995,3.36,0.48,9.8 +7.2,0.19,0.31,1.6,0.062,31.0,173.0,0.9917,3.35,0.44,11.7 +6.6,0.25,0.29,1.1,0.068,39.0,124.0,0.9914,3.34,0.58,11.0 +6.2,0.16,0.33,1.1,0.057,21.0,82.0,0.991,3.32,0.46,10.9 +6.4,0.18,0.35,1.0,0.045,39.0,108.0,0.9911,3.31,0.35,10.9 +6.8,0.2,0.59,0.9,0.147,38.0,132.0,0.993,3.05,0.38,9.1 +6.9,0.25,0.35,1.3,0.039,29.0,191.0,0.9908,3.13,0.52,11.0 +7.2,0.21,0.34,11.9,0.043,37.0,213.0,0.9962,3.09,0.5,9.6 +6.0,0.19,0.26,12.4,0.048,50.0,147.0,0.9972,3.3,0.36,8.9 +6.6,0.38,0.15,4.6,0.044,25.0,78.0,0.9931,3.11,0.38,10.2 +7.4,0.2,0.36,1.2,0.038,44.0,111.0,0.9926,3.36,0.34,9.9 +6.8,0.22,0.24,4.9,0.092,30.0,123.0,0.9951,3.03,0.46,8.6 +6.0,0.19,0.26,12.4,0.048,50.0,147.0,0.9972,3.3,0.36,8.9 +7.0,0.47,0.07,1.1,0.035,17.0,151.0,0.991,3.02,0.34,10.5 +6.6,0.38,0.15,4.6,0.044,25.0,78.0,0.9931,3.11,0.38,10.2 +7.2,0.24,0.27,1.4,0.038,31.0,122.0,0.9927,3.15,0.46,10.3 +6.2,0.35,0.03,1.2,0.064,29.0,120.0,0.9934,3.22,0.54,9.1 +6.4,0.26,0.24,6.4,0.04,27.0,124.0,0.9903,3.22,0.49,12.6 +6.7,0.25,0.13,1.2,0.041,81.0,174.0,0.992,3.14,0.42,9.8 +6.7,0.23,0.31,2.1,0.046,30.0,96.0,0.9926,3.33,0.64,10.7 +7.4,0.24,0.29,10.1,0.05,21.0,105.0,0.9962,3.13,0.35,9.5 +6.2,0.27,0.43,7.8,0.056,48.0,244.0,0.9956,3.1,0.51,9.0 +6.8,0.3,0.23,4.6,0.061,50.5,238.5,0.9958,3.32,0.6,9.5 +6.0,0.27,0.28,4.8,0.063,31.0,201.0,0.9964,3.69,0.71,10.0 +8.6,0.23,0.46,1.0,0.054,9.0,72.0,0.9941,2.95,0.49,9.1 +6.7,0.23,0.31,2.1,0.046,30.0,96.0,0.9926,3.33,0.64,10.7 +7.4,0.24,0.29,10.1,0.05,21.0,105.0,0.9962,3.13,0.35,9.5 +7.1,0.18,0.36,1.4,0.043,31.0,87.0,0.9898,3.26,0.37,12.7 +7.0,0.32,0.34,1.3,0.042,20.0,69.0,0.9912,3.31,0.65,12.0 +7.4,0.18,0.3,8.8,0.064,26.0,103.0,0.9961,2.94,0.56,9.3 +6.7,0.54,0.28,5.4,0.06,21.0,105.0,0.9949,3.27,0.37,9.0 +6.8,0.22,0.31,1.4,0.053,34.0,114.0,0.9929,3.39,0.77,10.6 +7.1,0.2,0.34,16.0,0.05,51.0,166.0,0.9985,3.21,0.6,9.2 +7.1,0.34,0.2,6.1,0.063,47.0,164.0,0.9946,3.17,0.42,10.0 +7.3,0.22,0.3,8.2,0.047,42.0,207.0,0.9966,3.33,0.46,9.5 +7.1,0.43,0.61,11.8,0.045,54.0,155.0,0.9974,3.11,0.45,8.7 +7.1,0.44,0.62,11.8,0.044,52.0,152.0,0.9975,3.12,0.46,8.7 +7.2,0.39,0.63,11.0,0.044,55.0,156.0,0.9974,3.09,0.44,8.7 +6.8,0.25,0.31,13.3,0.05,69.0,202.0,0.9972,3.22,0.48,9.7 +7.1,0.43,0.61,11.8,0.045,54.0,155.0,0.9974,3.11,0.45,8.7 +7.1,0.44,0.62,11.8,0.044,52.0,152.0,0.9975,3.12,0.46,8.7 +7.2,0.39,0.63,11.0,0.044,55.0,156.0,0.9974,3.09,0.44,8.7 +6.1,0.27,0.43,7.5,0.049,65.0,243.0,0.9957,3.12,0.47,9.0 +6.9,0.24,0.33,1.7,0.035,47.0,136.0,0.99,3.26,0.4,12.6 +6.9,0.21,0.33,1.8,0.034,48.0,136.0,0.9899,3.25,0.41,12.6 +7.5,0.17,0.32,1.7,0.04,51.0,148.0,0.9916,3.21,0.44,11.5 +7.1,0.26,0.29,12.4,0.044,62.0,240.0,0.9969,3.04,0.42,9.2 +6.0,0.34,0.66,15.9,0.046,26.0,164.0,0.9979,3.14,0.5,8.8 +8.6,0.265,0.36,1.2,0.034,15.0,80.0,0.9913,2.95,0.36,11.4 
+9.8,0.36,0.46,10.5,0.038,4.0,83.0,0.9956,2.89,0.3,10.1 +6.0,0.34,0.66,15.9,0.046,26.0,164.0,0.9979,3.14,0.5,8.8 +7.4,0.25,0.37,13.5,0.06,52.0,192.0,0.9975,3.0,0.44,9.1 +7.1,0.12,0.32,9.6,0.054,64.0,162.0,0.9962,3.4,0.41,9.4 +6.0,0.21,0.24,12.1,0.05,55.0,164.0,0.997,3.34,0.39,9.4 +7.5,0.305,0.4,18.9,0.059,44.0,170.0,1.0,2.99,0.46,9.0 +7.4,0.25,0.37,13.5,0.06,52.0,192.0,0.9975,3.0,0.44,9.1 +7.3,0.13,0.32,14.4,0.051,34.0,109.0,0.9974,3.2,0.35,9.2 +7.1,0.12,0.32,9.6,0.054,64.0,162.0,0.9962,3.4,0.41,9.4 +7.1,0.23,0.35,16.5,0.04,60.0,171.0,0.999,3.16,0.59,9.1 +7.1,0.23,0.35,16.5,0.04,60.0,171.0,0.999,3.16,0.59,9.1 +6.9,0.33,0.28,1.3,0.051,37.0,187.0,0.9927,3.27,0.6,10.3 +6.5,0.17,0.54,8.5,0.082,64.0,163.0,0.9959,2.89,0.39,8.8 +7.2,0.27,0.46,18.75,0.052,45.0,255.0,1.0,3.04,0.52,8.9 +7.2,0.31,0.5,13.3,0.056,68.0,195.0,0.9982,3.01,0.47,9.2 +6.7,0.41,0.34,9.2,0.049,29.0,150.0,0.9968,3.22,0.51,9.1 +6.7,0.41,0.34,9.2,0.049,29.0,150.0,0.9968,3.22,0.51,9.1 +5.5,0.485,0.0,1.5,0.065,8.0,103.0,0.994,3.63,0.4,9.7 +6.0,0.31,0.24,3.3,0.041,25.0,143.0,0.9914,3.31,0.44,11.3 +7.0,0.14,0.4,1.7,0.035,16.0,85.0,0.9911,3.19,0.42,11.8 +7.2,0.31,0.5,13.3,0.056,68.0,195.0,0.9982,3.01,0.47,9.2 +7.3,0.32,0.48,13.3,0.06,57.0,196.0,0.9982,3.04,0.5,9.2 +5.9,0.36,0.04,5.7,0.046,21.0,87.0,0.9934,3.22,0.51,10.2 +7.8,0.24,0.32,12.2,0.054,42.0,138.0,0.9984,3.01,0.54,8.8 +7.4,0.16,0.31,6.85,0.059,31.0,131.0,0.9952,3.29,0.34,9.7 +6.9,0.19,0.28,5.0,0.058,14.0,146.0,0.9952,3.29,0.36,9.1 +6.4,0.13,0.47,1.6,0.092,40.0,158.0,0.9928,3.21,0.36,9.8 +6.7,0.19,0.36,1.1,0.026,63.0,143.0,0.9912,3.27,0.48,11.0 +7.4,0.39,0.23,7.0,0.033,29.0,126.0,0.994,3.14,0.42,10.5 +6.5,0.24,0.32,7.6,0.038,48.0,203.0,0.9958,3.45,0.54,9.7 +6.1,0.3,0.56,2.8,0.044,47.0,179.0,0.9924,3.3,0.57,10.9 +6.1,0.3,0.56,2.7,0.046,46.0,184.0,0.9924,3.31,0.57,10.9 +5.7,0.26,0.25,10.4,0.02,7.0,57.0,0.994,3.39,0.37,10.6 +6.5,0.24,0.32,7.6,0.038,48.0,203.0,0.9958,3.45,0.54,9.7 +6.5,0.425,0.4,13.1,0.038,59.0,241.0,0.9979,3.23,0.57,9.0 +6.6,0.24,0.27,15.8,0.035,46.0,188.0,0.9982,3.24,0.51,9.2 +6.8,0.27,0.22,8.1,0.034,55.0,203.0,0.9961,3.19,0.52,8.9 +6.7,0.27,0.31,15.7,0.036,44.0,179.0,0.9979,3.26,0.56,9.6 +8.2,0.23,0.4,1.2,0.027,36.0,121.0,0.992,3.12,0.38,10.7 +7.1,0.37,0.67,10.5,0.045,49.0,155.0,0.9975,3.16,0.44,8.7 +6.8,0.19,0.36,1.9,0.035,30.0,96.0,0.9917,3.15,0.54,10.8 +8.1,0.28,0.39,1.9,0.029,18.0,79.0,0.9923,3.23,0.52,11.8 +6.3,0.31,0.34,2.2,0.045,20.0,77.0,0.9927,3.3,0.43,10.2 +7.1,0.37,0.67,10.5,0.045,49.0,155.0,0.9975,3.16,0.44,8.7 +7.9,0.21,0.4,1.2,0.039,38.0,107.0,0.992,3.21,0.54,10.8 +8.5,0.21,0.41,4.3,0.036,24.0,99.0,0.9947,3.18,0.53,9.7 +8.1,0.2,0.4,2.0,0.037,19.0,87.0,0.9921,3.12,0.54,11.2 +6.3,0.255,0.37,1.1,0.04,37.0,114.0,0.9905,3.0,0.39,10.9 +5.6,0.16,0.27,1.4,0.044,53.0,168.0,0.9918,3.28,0.37,10.1 +6.4,0.595,0.14,5.2,0.058,15.0,97.0,0.9951,3.38,0.36,9.0 +6.3,0.34,0.33,4.6,0.034,19.0,80.0,0.9917,3.38,0.58,12.0 +6.9,0.25,0.3,4.1,0.054,23.0,116.0,0.994,2.99,0.38,9.4 +7.9,0.22,0.38,8.0,0.043,46.0,152.0,0.9934,3.12,0.32,11.5 +7.6,0.18,0.46,10.2,0.055,58.0,135.0,0.9968,3.14,0.43,9.9 +6.9,0.25,0.3,4.1,0.054,23.0,116.0,0.994,2.99,0.38,9.4 +7.2,0.18,0.41,1.2,0.048,41.0,97.0,0.9919,3.14,0.45,10.4 +8.2,0.23,0.4,7.5,0.049,12.0,76.0,0.9966,3.06,0.84,9.7 +7.4,0.24,0.42,14.0,0.066,48.0,198.0,0.9979,2.89,0.42,8.9 +7.4,0.24,0.42,14.0,0.066,48.0,198.0,0.9979,2.89,0.42,8.9 +6.1,0.32,0.24,1.5,0.036,38.0,124.0,0.9898,3.29,0.42,12.4 +5.2,0.44,0.04,1.4,0.036,43.0,119.0,0.9894,3.36,0.33,12.1 +5.2,0.44,0.04,1.4,0.036,43.0,119.0,0.9894,3.36,0.33,12.1 
+6.1,0.32,0.24,1.5,0.036,38.0,124.0,0.9898,3.29,0.42,12.4 +6.4,0.22,0.56,14.5,0.055,27.0,159.0,0.998,2.98,0.4,9.1 +6.3,0.36,0.3,4.8,0.049,14.0,85.0,0.9932,3.28,0.39,10.6 +7.4,0.24,0.42,14.0,0.066,48.0,198.0,0.9979,2.89,0.42,8.9 +6.7,0.24,0.35,13.1,0.05,64.0,205.0,0.997,3.15,0.5,9.5 +7.0,0.23,0.36,13.0,0.051,72.0,177.0,0.9972,3.16,0.49,9.8 +8.4,0.27,0.46,8.7,0.048,39.0,197.0,0.9974,3.14,0.59,9.6 +6.7,0.46,0.18,2.4,0.034,25.0,98.0,0.9896,3.08,0.44,12.6 +7.5,0.29,0.31,8.95,0.055,20.0,151.0,0.9968,3.08,0.54,9.3 +9.8,0.42,0.48,9.85,0.034,5.0,110.0,0.9958,2.87,0.29,10.0 +7.1,0.3,0.46,1.5,0.066,29.0,133.0,0.9906,3.12,0.54,12.7 +7.9,0.19,0.45,1.5,0.045,17.0,96.0,0.9917,3.13,0.39,11.0 +7.6,0.48,0.37,0.8,0.037,4.0,100.0,0.9902,3.03,0.39,11.4 +6.3,0.22,0.43,4.55,0.038,31.0,130.0,0.9918,3.35,0.33,11.5 +7.5,0.27,0.31,17.7,0.051,33.0,173.0,0.999,3.09,0.64,10.2 +6.9,0.23,0.4,7.5,0.04,50.0,151.0,0.9927,3.11,0.27,11.4 +7.2,0.32,0.47,5.1,0.044,19.0,65.0,0.991,3.03,0.41,12.6 +5.9,0.23,0.3,12.9,0.054,57.0,170.0,0.9972,3.28,0.39,9.4 +6.0,0.67,0.07,1.2,0.06,9.0,108.0,0.9931,3.11,0.35,8.7 +6.4,0.25,0.32,5.5,0.049,41.0,176.0,0.995,3.19,0.68,9.2 +6.4,0.33,0.31,5.5,0.048,42.0,173.0,0.9951,3.19,0.66,9.3 +7.1,0.34,0.15,1.2,0.053,61.0,183.0,0.9936,3.09,0.43,9.2 +6.8,0.28,0.4,22.0,0.048,48.0,167.0,1.001,2.93,0.5,8.7 +6.9,0.27,0.4,14.0,0.05,64.0,227.0,0.9979,3.18,0.58,9.6 +6.8,0.26,0.56,11.9,0.043,64.0,226.0,0.997,3.02,0.63,9.3 +6.8,0.29,0.56,11.9,0.043,66.0,230.0,0.9972,3.02,0.63,9.3 +6.7,0.24,0.41,9.4,0.04,49.0,166.0,0.9954,3.12,0.61,9.9 +5.9,0.3,0.23,4.2,0.038,42.0,119.0,0.9924,3.15,0.5,11.0 +6.8,0.53,0.35,3.8,0.034,26.0,109.0,0.9906,3.26,0.57,12.7 +6.5,0.28,0.28,8.5,0.047,54.0,210.0,0.9962,3.09,0.54,8.9 +6.6,0.28,0.28,8.5,0.052,55.0,211.0,0.9962,3.09,0.55,8.9 +6.8,0.28,0.4,22.0,0.048,48.0,167.0,1.001,2.93,0.5,8.7 +6.8,0.28,0.36,8.0,0.045,28.0,123.0,0.9928,3.02,0.37,11.4 +6.6,0.15,0.34,5.1,0.055,34.0,125.0,0.9942,3.36,0.42,9.6 +6.4,0.29,0.44,3.6,0.2,75.0,181.0,0.9942,3.02,0.41,9.1 +6.4,0.3,0.45,3.5,0.197,76.0,180.0,0.9942,3.02,0.39,9.1 +6.4,0.29,0.44,3.6,0.197,75.0,183.0,0.9942,3.01,0.38,9.1 +6.8,0.26,0.24,7.8,0.052,54.0,214.0,0.9961,3.13,0.47,8.9 +7.1,0.32,0.24,13.1,0.05,52.0,204.0,0.998,3.1,0.49,8.8 +6.8,0.26,0.24,7.8,0.052,54.0,214.0,0.9961,3.13,0.47,8.9 +6.8,0.27,0.26,16.1,0.049,55.0,196.0,0.9984,3.15,0.5,9.3 +7.1,0.32,0.24,13.1,0.05,52.0,204.0,0.998,3.1,0.49,8.8 +6.9,0.54,0.32,13.2,0.05,53.0,236.0,0.9973,3.2,0.5,9.6 +6.8,0.26,0.34,13.9,0.034,39.0,134.0,0.9949,3.33,0.53,12.0 +5.8,0.28,0.35,2.3,0.053,36.0,114.0,0.9924,3.28,0.5,10.2 +6.4,0.21,0.5,11.6,0.042,45.0,153.0,0.9972,3.15,0.43,8.8 +7.0,0.16,0.32,8.3,0.045,38.0,126.0,0.9958,3.21,0.34,9.2 +10.2,0.44,0.88,6.2,0.049,20.0,124.0,0.9968,2.99,0.51,9.9 +6.8,0.57,0.29,2.2,0.04,15.0,77.0,0.9938,3.32,0.74,10.2 +6.1,0.4,0.31,0.9,0.048,23.0,170.0,0.993,3.22,0.77,9.5 +5.6,0.245,0.25,9.7,0.032,12.0,68.0,0.994,3.31,0.34,10.5 +6.8,0.18,0.38,1.4,0.038,35.0,111.0,0.9918,3.32,0.59,11.2 +7.0,0.16,0.32,8.3,0.045,38.0,126.0,0.9958,3.21,0.34,9.2 +6.7,0.13,0.29,5.3,0.051,31.0,122.0,0.9944,3.44,0.37,9.7 +6.2,0.25,0.25,1.4,0.03,35.0,105.0,0.9912,3.3,0.44,11.1 +5.8,0.26,0.24,9.2,0.044,55.0,152.0,0.9961,3.31,0.38,9.4 +7.5,0.27,0.36,7.0,0.036,45.0,164.0,0.9939,3.03,0.33,11.0 +5.8,0.26,0.24,9.2,0.044,55.0,152.0,0.9961,3.31,0.38,9.4 +5.7,0.28,0.24,17.5,0.044,60.0,167.0,0.9989,3.31,0.44,9.4 +7.5,0.23,0.36,7.0,0.036,43.0,161.0,0.9938,3.04,0.32,11.0 +7.5,0.27,0.36,7.0,0.036,45.0,164.0,0.9939,3.03,0.33,11.0 +7.2,0.685,0.21,9.5,0.07,33.0,172.0,0.9971,3.0,0.55,9.1 
+6.2,0.25,0.25,1.4,0.03,35.0,105.0,0.9912,3.3,0.44,11.1 +6.5,0.19,0.3,0.8,0.043,33.0,144.0,0.9936,3.42,0.39,9.1 +6.3,0.495,0.22,1.8,0.046,31.0,140.0,0.9929,3.39,0.54,10.4 +7.1,0.24,0.41,17.8,0.046,39.0,145.0,0.9998,3.32,0.39,8.7 +6.4,0.17,0.32,2.4,0.048,41.0,200.0,0.9938,3.5,0.5,9.7 +7.1,0.25,0.32,10.3,0.041,66.0,272.0,0.9969,3.17,0.52,9.1 +6.4,0.17,0.32,2.4,0.048,41.0,200.0,0.9938,3.5,0.5,9.7 +7.1,0.24,0.41,17.8,0.046,39.0,145.0,0.9998,3.32,0.39,8.7 +6.8,0.64,0.08,9.7,0.062,26.0,142.0,0.9972,3.37,0.46,8.9 +8.3,0.28,0.4,7.8,0.041,38.0,194.0,0.9976,3.34,0.51,9.6 +8.2,0.27,0.39,7.8,0.039,49.0,208.0,0.9976,3.31,0.51,9.5 +7.2,0.23,0.38,14.3,0.058,55.0,194.0,0.9979,3.09,0.44,9.0 +7.2,0.23,0.38,14.3,0.058,55.0,194.0,0.9979,3.09,0.44,9.0 +7.2,0.23,0.38,14.3,0.058,55.0,194.0,0.9979,3.09,0.44,9.0 +7.2,0.23,0.38,14.3,0.058,55.0,194.0,0.9979,3.09,0.44,9.0 +6.8,0.52,0.32,13.2,0.044,54.0,221.0,0.9972,3.27,0.5,9.6 +7.0,0.26,0.59,1.4,0.037,40.0,120.0,0.9918,3.34,0.41,11.1 +6.2,0.25,0.21,15.55,0.039,28.0,159.0,0.9982,3.48,0.64,9.6 +7.3,0.32,0.23,13.7,0.05,49.0,197.0,0.9985,3.2,0.46,8.7 +7.7,0.31,0.26,7.8,0.031,23.0,90.0,0.9944,3.13,0.5,10.4 +7.1,0.21,0.37,2.4,0.026,23.0,100.0,0.9903,3.15,0.38,11.4 +6.8,0.24,0.34,2.7,0.047,64.5,218.5,0.9934,3.3,0.58,9.7 +6.9,0.4,0.56,11.2,0.043,40.0,142.0,0.9975,3.14,0.46,8.7 +6.1,0.18,0.36,2.0,0.038,20.0,249.5,0.9923,3.37,0.79,11.3 +6.8,0.21,0.27,2.1,0.03,26.0,139.0,0.99,3.16,0.61,12.6 +5.8,0.2,0.27,1.4,0.031,12.0,77.0,0.9905,3.25,0.36,10.9 +5.6,0.19,0.26,1.4,0.03,12.0,76.0,0.9905,3.25,0.37,10.9 +6.1,0.41,0.14,10.4,0.037,18.0,119.0,0.996,3.38,0.45,10.0 +5.9,0.21,0.28,4.6,0.053,40.0,199.0,0.9964,3.72,0.7,10.0 +8.5,0.26,0.21,16.2,0.074,41.0,197.0,0.998,3.02,0.5,9.8 +6.9,0.4,0.56,11.2,0.043,40.0,142.0,0.9975,3.14,0.46,8.7 +5.8,0.24,0.44,3.5,0.029,5.0,109.0,0.9913,3.53,0.43,11.7 +5.8,0.24,0.39,1.5,0.054,37.0,158.0,0.9932,3.21,0.52,9.3 +6.7,0.26,0.39,1.1,0.04,45.0,147.0,0.9935,3.32,0.58,9.6 +6.3,0.35,0.3,5.7,0.035,8.0,97.0,0.9927,3.27,0.41,11.0 +6.3,0.35,0.3,5.7,0.035,8.0,97.0,0.9927,3.27,0.41,11.0 +6.4,0.23,0.39,1.8,0.032,23.0,118.0,0.9912,3.32,0.5,11.8 +5.8,0.36,0.38,0.9,0.037,3.0,75.0,0.9904,3.28,0.34,11.4 +6.9,0.115,0.35,5.4,0.048,36.0,108.0,0.9939,3.32,0.42,10.2 +6.9,0.29,0.4,19.45,0.043,36.0,156.0,0.9996,2.93,0.47,8.9 +6.9,0.28,0.4,8.2,0.036,15.0,95.0,0.9944,3.17,0.33,10.2 +7.2,0.29,0.4,13.6,0.045,66.0,231.0,0.9977,3.08,0.59,9.6 +6.2,0.24,0.35,1.2,0.038,22.0,167.0,0.9912,3.1,0.48,10.6 +6.9,0.29,0.4,19.45,0.043,36.0,156.0,0.9996,2.93,0.47,8.9 +6.9,0.32,0.26,8.3,0.053,32.0,180.0,0.9965,3.25,0.51,9.2 +5.3,0.58,0.07,6.9,0.043,34.0,149.0,0.9944,3.34,0.57,9.7 +5.3,0.585,0.07,7.1,0.044,34.0,145.0,0.9945,3.34,0.57,9.7 +5.4,0.59,0.07,7.0,0.045,36.0,147.0,0.9944,3.34,0.57,9.7 +6.9,0.32,0.26,8.3,0.053,32.0,180.0,0.9965,3.25,0.51,9.2 +5.2,0.6,0.07,7.0,0.044,33.0,147.0,0.9944,3.33,0.58,9.7 +5.8,0.25,0.26,13.1,0.051,44.0,148.0,0.9972,3.29,0.38,9.3 +6.6,0.58,0.3,5.1,0.057,30.0,123.0,0.9949,3.24,0.38,9.0 +7.0,0.29,0.54,10.7,0.046,59.0,234.0,0.9966,3.05,0.61,9.5 +6.6,0.19,0.41,8.9,0.046,51.0,169.0,0.9954,3.14,0.57,9.8 +6.7,0.2,0.41,9.1,0.044,50.0,166.0,0.9954,3.14,0.58,9.8 +7.7,0.26,0.4,1.1,0.042,9.0,60.0,0.9915,2.89,0.5,10.6 +6.8,0.32,0.34,1.2,0.044,14.0,67.0,0.9919,3.05,0.47,10.6 +7.0,0.3,0.49,4.7,0.036,17.0,105.0,0.9916,3.26,0.68,12.4 +7.0,0.24,0.36,2.8,0.034,22.0,112.0,0.99,3.19,0.38,12.6 +6.1,0.31,0.58,5.0,0.039,36.0,114.0,0.9909,3.3,0.6,12.3 +6.8,0.44,0.37,5.1,0.047,46.0,201.0,0.9938,3.08,0.65,10.5 +6.7,0.34,0.3,15.6,0.054,51.0,196.0,0.9982,3.19,0.49,9.3 
+7.1,0.35,0.24,15.4,0.055,46.0,198.0,0.9988,3.12,0.49,8.8 +7.3,0.32,0.25,7.2,0.056,47.0,180.0,0.9961,3.08,0.47,8.8 +6.5,0.28,0.33,15.7,0.053,51.0,190.0,0.9978,3.22,0.51,9.7 +7.2,0.23,0.39,14.2,0.058,49.0,192.0,0.9979,2.98,0.48,9.0 +7.2,0.23,0.39,14.2,0.058,49.0,192.0,0.9979,2.98,0.48,9.0 +7.2,0.23,0.39,14.2,0.058,49.0,192.0,0.9979,2.98,0.48,9.0 +7.2,0.23,0.39,14.2,0.058,49.0,192.0,0.9979,2.98,0.48,9.0 +5.9,0.15,0.31,5.8,0.041,53.0,155.0,0.9945,3.52,0.46,10.5 +7.4,0.28,0.42,19.8,0.066,53.0,195.0,1.0,2.96,0.44,9.1 +6.2,0.28,0.22,7.3,0.041,26.0,157.0,0.9957,3.44,0.64,9.8 +9.1,0.59,0.38,1.6,0.066,34.0,182.0,0.9968,3.23,0.38,8.5 +6.3,0.33,0.27,1.2,0.046,34.0,175.0,0.9934,3.37,0.54,9.4 +8.3,0.39,0.7,10.6,0.045,33.0,169.0,0.9976,3.09,0.57,9.4 +7.2,0.19,0.46,3.8,0.041,82.0,187.0,0.9932,3.19,0.6,11.2 +7.5,0.17,0.44,11.3,0.046,65.0,146.0,0.997,3.17,0.45,10.0 +6.7,0.17,0.5,2.1,0.043,27.0,122.0,0.9923,3.15,0.45,10.3 +6.1,0.41,0.0,1.6,0.063,36.0,87.0,0.9914,3.27,0.67,10.8 +8.3,0.2,0.35,0.9,0.05,12.0,74.0,0.992,3.13,0.38,10.5 +6.1,0.41,0.0,1.6,0.063,36.0,87.0,0.9914,3.27,0.67,10.8 +6.0,0.29,0.21,1.3,0.055,42.0,168.0,0.9914,3.32,0.43,11.1 +7.3,0.41,0.24,6.8,0.057,41.0,163.0,0.9949,3.2,0.41,9.9 +7.3,0.41,0.24,6.8,0.057,41.0,163.0,0.9949,3.2,0.41,9.9 +7.2,0.43,0.24,6.7,0.058,40.0,163.0,0.995,3.2,0.41,9.9 +7.3,0.4,0.24,6.7,0.058,41.0,166.0,0.995,3.2,0.41,9.9 +6.2,0.33,0.27,4.9,0.036,30.0,134.0,0.9927,3.2,0.42,10.4 +6.2,0.31,0.26,4.8,0.037,36.0,148.0,0.9928,3.21,0.41,10.4 +6.1,0.36,0.27,2.1,0.035,16.0,100.0,0.9917,3.4,0.71,11.5 +5.0,0.55,0.14,8.3,0.032,35.0,164.0,0.9918,3.53,0.51,12.5 +7.8,0.25,0.41,3.7,0.042,37.0,149.0,0.9954,3.36,0.45,10.0 +5.7,0.36,0.21,6.7,0.038,51.0,166.0,0.9941,3.29,0.63,10.0 +5.8,0.34,0.21,6.6,0.04,50.0,167.0,0.9941,3.29,0.62,10.0 +6.8,0.28,0.6,1.1,0.132,42.0,127.0,0.9934,3.09,0.44,9.1 +6.8,0.25,0.34,4.7,0.031,34.0,134.0,0.9927,3.21,0.38,10.6 +6.6,0.24,0.35,7.7,0.031,36.0,135.0,0.9938,3.19,0.37,10.5 +5.9,0.3,0.47,7.85,0.03,19.0,133.0,0.9933,3.52,0.43,11.5 +6.1,0.125,0.25,3.3,0.04,10.0,69.0,0.9934,3.54,0.59,10.1 +6.0,0.1,0.24,1.1,0.041,15.0,65.0,0.9927,3.61,0.61,10.3 +6.6,0.24,0.35,7.7,0.031,36.0,135.0,0.9938,3.19,0.37,10.5 +6.8,0.25,0.34,4.7,0.031,34.0,134.0,0.9927,3.21,0.38,10.6 +6.8,0.28,0.44,9.3,0.031,35.0,137.0,0.9946,3.16,0.36,10.4 +8.3,0.41,0.51,2.0,0.046,11.0,207.0,0.993,3.02,0.55,11.4 +7.5,0.27,0.31,5.8,0.057,131.0,313.0,0.9946,3.18,0.59,10.5 +7.9,0.26,0.41,15.15,0.04,38.0,216.0,0.9976,2.96,0.6,10.0 +6.4,0.34,0.23,6.3,0.039,37.0,143.0,0.9944,3.19,0.65,10.0 +6.5,0.28,0.35,15.4,0.042,55.0,195.0,0.9978,3.23,0.5,9.6 +7.2,0.21,0.41,1.3,0.036,33.0,85.0,0.992,3.17,0.51,10.4 +6.4,0.32,0.35,4.8,0.03,34.0,101.0,0.9912,3.36,0.6,12.5 +6.8,0.24,0.34,4.6,0.032,37.0,135.0,0.9927,3.2,0.39,10.6 +6.3,0.23,0.3,1.8,0.033,16.0,91.0,0.9906,3.28,0.4,11.8 +6.5,0.28,0.34,9.9,0.038,30.0,133.0,0.9954,3.11,0.44,9.8 +5.6,0.26,0.26,5.7,0.031,12.0,80.0,0.9923,3.25,0.38,10.8 +6.3,0.23,0.3,1.8,0.033,16.0,91.0,0.9906,3.28,0.4,11.8 +6.3,0.23,0.33,1.5,0.036,15.0,105.0,0.991,3.32,0.42,11.2 +5.8,0.27,0.27,12.3,0.045,55.0,170.0,0.9972,3.28,0.42,9.3 +5.9,0.26,0.4,1.3,0.047,12.0,139.0,0.9945,3.45,0.53,10.4 +6.6,0.18,0.35,1.5,0.049,49.0,141.0,0.9934,3.43,0.85,10.2 +7.4,0.2,0.43,7.8,0.045,27.0,153.0,0.9964,3.19,0.55,9.0 +8.0,0.24,0.36,1.5,0.047,17.0,129.0,0.9948,3.2,0.54,10.0 +6.4,0.26,0.42,9.7,0.044,30.0,140.0,0.9962,3.18,0.47,9.1 +5.4,0.31,0.47,3.0,0.053,46.0,144.0,0.9931,3.29,0.76,10.0 +5.4,0.29,0.47,3.0,0.052,47.0,145.0,0.993,3.29,0.75,10.0 +7.1,0.145,0.33,4.6,0.05,33.0,131.0,0.9942,3.28,0.4,9.6 
+5.6,0.34,0.1,1.3,0.031,20.0,68.0,0.9906,3.36,0.51,11.2 +6.7,0.19,0.41,15.6,0.056,75.0,155.0,0.9995,3.2,0.44,8.8 +7.8,0.18,0.46,13.6,0.052,38.0,118.0,0.998,3.15,0.5,10.0 +7.6,0.17,0.45,11.2,0.054,56.0,137.0,0.997,3.15,0.47,10.0 +6.3,0.12,0.36,2.1,0.044,47.0,146.0,0.9914,3.27,0.74,11.4 +7.3,0.33,0.4,6.85,0.038,32.0,138.0,0.992,3.03,0.3,11.9 +5.5,0.335,0.3,2.5,0.071,27.0,128.0,0.9924,3.14,0.51,9.6 +7.3,0.33,0.4,6.85,0.038,32.0,138.0,0.992,3.03,0.3,11.9 +5.8,0.4,0.42,4.4,0.047,38.5,245.0,0.9937,3.25,0.57,9.6 +7.3,0.22,0.37,14.3,0.063,48.0,191.0,0.9978,2.89,0.38,9.0 +7.3,0.22,0.37,14.3,0.063,48.0,191.0,0.9978,2.89,0.38,9.0 +6.1,0.36,0.33,1.1,0.05,24.0,169.0,0.9927,3.15,0.78,9.5 +10.0,0.2,0.39,1.4,0.05,19.0,152.0,0.994,3.0,0.42,10.4 +6.9,0.24,0.34,4.7,0.04,43.0,161.0,0.9935,3.2,0.59,10.6 +6.4,0.24,0.32,14.9,0.047,54.0,162.0,0.9968,3.28,0.5,10.2 +7.1,0.365,0.14,1.2,0.055,24.0,84.0,0.9941,3.15,0.43,8.9 +6.8,0.15,0.3,5.3,0.05,40.0,127.0,0.9942,3.4,0.39,9.7 +7.3,0.22,0.37,14.3,0.063,48.0,191.0,0.9978,2.89,0.38,9.0 +6.8,0.16,0.4,2.3,0.037,18.0,102.0,0.9923,3.49,0.42,11.4 +6.0,0.26,0.32,3.5,0.028,29.0,113.0,0.9912,3.4,0.71,12.3 +6.0,0.18,0.27,1.5,0.089,40.0,143.0,0.9923,3.49,0.62,10.8 +6.9,0.33,0.21,1.0,0.053,39.0,148.0,0.9927,3.12,0.45,9.4 +7.7,0.29,0.48,2.3,0.049,36.0,178.0,0.9931,3.17,0.64,10.6 +7.1,0.39,0.35,12.5,0.044,26.0,72.0,0.9941,3.17,0.29,11.6 +6.9,0.33,0.21,1.0,0.053,39.0,148.0,0.9927,3.12,0.45,9.4 +7.7,0.29,0.48,2.3,0.049,36.0,178.0,0.9931,3.17,0.64,10.6 +6.6,0.905,0.19,0.8,0.048,17.0,204.0,0.9934,3.34,0.56,10.0 +7.2,0.27,0.27,2.4,0.048,30.0,149.0,0.9936,3.1,0.51,9.2 +5.1,0.33,0.22,1.6,0.027,18.0,89.0,0.9893,3.51,0.38,12.5 +5.1,0.33,0.22,1.6,0.027,18.0,89.0,0.9893,3.51,0.38,12.5 +6.4,0.31,0.28,1.5,0.037,12.0,119.0,0.9919,3.32,0.51,10.4 +7.3,0.2,0.44,1.4,0.045,21.0,98.0,0.9924,3.15,0.46,10.0 +5.7,0.32,0.5,2.6,0.049,17.0,155.0,0.9927,3.22,0.64,10.0 +6.4,0.31,0.28,1.5,0.037,12.0,119.0,0.9919,3.32,0.51,10.4 +7.3,0.2,0.44,1.4,0.045,21.0,98.0,0.9924,3.15,0.46,10.0 +7.2,0.28,0.26,12.5,0.046,48.0,179.0,0.9975,3.1,0.52,9.0 +7.5,0.35,0.28,9.6,0.051,26.0,157.0,0.9969,3.12,0.53,9.2 +7.2,0.27,0.27,2.4,0.048,30.0,149.0,0.9936,3.1,0.51,9.2 +6.0,0.36,0.39,3.2,0.027,20.0,125.0,0.991,3.38,0.39,11.3 +5.1,0.33,0.22,1.6,0.027,18.0,89.0,0.9893,3.51,0.38,12.5 +5.0,0.17,0.56,1.5,0.026,24.0,115.0,0.9906,3.48,0.39,10.8 +6.3,0.39,0.35,5.9,0.04,82.5,260.0,0.9941,3.12,0.66,10.1 +6.7,0.21,0.32,5.4,0.047,29.0,140.0,0.995,3.39,0.46,9.7 +7.0,0.3,0.38,14.9,0.032,60.0,181.0,0.9983,3.18,0.61,9.3 +7.0,0.3,0.38,14.9,0.032,60.0,181.0,0.9983,3.18,0.61,9.3 +6.5,0.36,0.32,1.1,0.031,13.0,66.0,0.9916,3.1,0.46,10.6 +6.1,0.55,0.15,9.8,0.031,19.0,125.0,0.9957,3.36,0.47,10.2 +7.3,0.24,0.43,2.0,0.021,20.0,69.0,0.99,3.08,0.56,12.2 +6.8,0.37,0.51,11.8,0.044,62.0,163.0,0.9976,3.19,0.44,8.8 +6.8,0.27,0.12,1.3,0.04,87.0,168.0,0.992,3.18,0.41,10.0 +8.2,0.28,0.42,1.8,0.031,30.0,93.0,0.9917,3.09,0.39,11.4 +6.3,0.2,0.4,1.5,0.037,35.0,107.0,0.9917,3.46,0.5,11.4 +5.9,0.26,0.27,18.2,0.048,52.0,168.0,0.9993,3.35,0.44,9.4 +6.4,0.19,0.42,2.9,0.032,32.0,83.0,0.9908,3.3,0.41,11.7 +6.3,0.2,0.4,1.5,0.037,35.0,107.0,0.9917,3.46,0.5,11.4 +6.8,0.37,0.51,11.8,0.044,62.0,163.0,0.9976,3.19,0.44,8.8 +6.1,0.35,0.07,1.4,0.069,22.0,108.0,0.9934,3.23,0.52,9.2 +7.1,0.27,0.31,18.2,0.046,55.0,252.0,1.0,3.07,0.56,8.7 +6.8,0.22,0.31,6.3,0.035,33.0,170.0,0.9918,3.24,0.66,12.6 +6.8,0.27,0.12,1.3,0.04,87.0,168.0,0.992,3.18,0.41,10.0 +5.8,0.28,0.34,4.0,0.031,40.0,99.0,0.9896,3.39,0.39,12.8 +6.9,0.49,0.24,1.2,0.049,13.0,125.0,0.9932,3.17,0.51,9.4 
+6.3,0.14,0.39,1.2,0.044,26.0,116.0,0.992,3.26,0.53,10.3 +8.2,0.28,0.42,1.8,0.031,30.0,93.0,0.9917,3.09,0.39,11.4 +7.2,0.25,0.39,18.95,0.038,42.0,155.0,0.9999,2.97,0.47,9.0 +7.3,0.28,0.36,12.7,0.04,38.0,140.0,0.998,3.3,0.79,9.6 +7.2,0.19,0.39,1.2,0.036,32.0,85.0,0.9918,3.16,0.5,10.5 +7.2,0.19,0.39,1.2,0.036,32.0,85.0,0.9918,3.16,0.5,10.5 +7.2,0.25,0.39,18.95,0.038,42.0,155.0,0.9999,2.97,0.47,9.0 +7.3,0.28,0.36,12.7,0.04,38.0,140.0,0.998,3.3,0.79,9.6 +7.4,0.21,0.27,1.2,0.041,27.0,99.0,0.9927,3.19,0.33,9.8 +6.8,0.26,0.22,7.7,0.047,57.0,210.0,0.9959,3.1,0.47,9.0 +7.4,0.21,0.27,1.2,0.041,27.0,99.0,0.9927,3.19,0.33,9.8 +7.4,0.31,0.28,1.6,0.05,33.0,137.0,0.9929,3.31,0.56,10.5 +7.0,0.22,0.31,2.7,0.03,41.0,136.0,0.9898,3.16,0.37,12.7 +7.0,0.21,0.28,8.7,0.045,37.0,222.0,0.9954,3.25,0.54,10.4 +7.0,0.21,0.28,8.6,0.045,37.0,221.0,0.9954,3.25,0.54,10.4 +7.0,0.21,0.28,8.6,0.045,37.0,221.0,0.9954,3.25,0.54,10.4 +6.9,0.23,0.38,8.3,0.047,47.0,162.0,0.9954,3.34,0.52,10.5 +7.0,0.21,0.28,8.7,0.045,37.0,222.0,0.9954,3.25,0.54,10.4 +7.0,0.21,0.28,8.6,0.045,37.0,221.0,0.9954,3.25,0.54,10.4 +6.8,0.29,0.5,13.3,0.053,48.0,194.0,0.9974,3.09,0.45,9.4 +7.8,0.21,0.27,1.2,0.051,20.0,89.0,0.9936,3.06,0.46,9.1 +7.1,0.31,0.47,13.6,0.056,54.0,197.0,0.9978,3.1,0.49,9.3 +6.8,0.29,0.5,13.3,0.053,48.0,194.0,0.9974,3.09,0.45,9.4 +6.4,0.34,0.1,1.1,0.048,19.0,84.0,0.9927,3.21,0.38,9.8 +7.4,0.155,0.34,2.3,0.045,73.5,214.0,0.9934,3.18,0.61,9.9 +7.2,0.55,0.09,1.5,0.108,16.0,151.0,0.9938,3.07,0.57,9.2 +7.0,0.23,0.36,7.1,0.028,31.0,104.0,0.9922,3.35,0.47,12.1 +6.9,0.2,0.37,6.2,0.027,24.0,97.0,0.992,3.38,0.49,12.2 +6.1,0.28,0.32,2.5,0.042,23.0,218.5,0.9935,3.27,0.6,9.8 +6.6,0.16,0.32,1.4,0.035,49.0,186.0,0.9906,3.35,0.64,12.4 +7.4,0.155,0.34,2.3,0.045,73.5,214.0,0.9934,3.18,0.61,9.9 +6.2,0.35,0.04,1.2,0.06,23.0,108.0,0.9934,3.26,0.54,9.2 +6.7,0.22,0.37,1.6,0.028,24.0,102.0,0.9913,3.29,0.59,11.6 +6.1,0.38,0.2,6.6,0.033,25.0,137.0,0.9938,3.3,0.69,10.4 +6.0,0.25,0.28,2.2,0.026,54.0,126.0,0.9898,3.43,0.65,12.9 +6.6,0.52,0.44,12.2,0.048,54.0,245.0,0.9975,3.26,0.54,9.3 +6.9,0.24,0.36,20.8,0.031,40.0,139.0,0.9975,3.2,0.33,11.0 +7.1,0.32,0.32,11.0,0.038,16.0,66.0,0.9937,3.24,0.4,11.5 +5.8,0.28,0.27,2.6,0.054,30.0,156.0,0.9914,3.53,0.42,12.4 +6.5,0.41,0.24,14.0,0.048,24.0,113.0,0.9982,3.44,0.53,9.8 +6.5,0.41,0.24,14.0,0.048,24.0,113.0,0.9982,3.44,0.53,9.8 +6.4,0.28,0.29,1.6,0.052,34.0,127.0,0.9929,3.48,0.56,10.5 +7.2,0.6,0.2,9.9,0.07,21.0,174.0,0.9971,3.03,0.54,9.1 +6.1,0.2,0.25,1.2,0.038,34.0,128.0,0.9921,3.24,0.44,10.1 +5.9,0.46,0.14,2.7,0.042,27.0,160.0,0.9931,3.46,0.51,10.6 +6.0,0.27,0.27,1.6,0.046,32.0,113.0,0.9924,3.41,0.51,10.5 +6.4,0.28,0.29,1.6,0.052,34.0,127.0,0.9929,3.48,0.56,10.5 +6.4,0.41,0.24,14.0,0.048,24.0,113.0,0.9982,3.44,0.53,9.8 +6.3,0.23,0.31,1.5,0.022,11.0,82.0,0.9892,3.3,0.4,12.9 +7.1,0.21,0.27,8.6,0.056,26.0,111.0,0.9956,2.95,0.52,9.5 +6.0,0.37,0.32,1.0,0.053,31.0,218.5,0.9924,3.29,0.72,9.8 +6.1,0.43,0.35,9.1,0.059,83.0,249.0,0.9971,3.37,0.5,8.5 +7.1,0.21,0.27,8.6,0.056,26.0,111.0,0.9956,2.95,0.52,9.5 +7.0,0.25,0.29,15.2,0.047,40.0,171.0,0.9982,3.22,0.45,9.3 +5.9,0.25,0.19,12.4,0.047,50.0,162.0,0.9973,3.35,0.38,9.5 +6.8,0.32,0.21,2.2,0.044,15.0,68.0,0.9932,3.17,0.39,9.4 +7.2,0.39,0.62,11.0,0.047,66.0,178.0,0.9976,3.16,0.5,8.7 +6.3,0.21,0.58,10.0,0.081,34.0,126.0,0.9962,2.95,0.46,8.9 +7.0,0.14,0.32,9.0,0.039,54.0,141.0,0.9956,3.22,0.43,9.4 +6.8,0.32,0.21,2.2,0.044,15.0,68.0,0.9932,3.17,0.39,9.4 +7.2,0.39,0.62,11.0,0.047,66.0,178.0,0.9976,3.16,0.5,8.7 +7.2,0.29,0.53,18.15,0.047,59.0,182.0,0.9992,3.09,0.52,9.6 
+8.6,0.37,0.7,12.15,0.039,21.0,158.0,0.9983,3.0,0.73,9.3 +6.5,0.38,0.34,3.4,0.036,34.0,200.0,0.9937,3.14,0.76,10.0 +6.6,0.24,0.29,2.0,0.023,19.0,86.0,0.99,3.25,0.45,12.5 +7.0,0.17,0.31,4.8,0.034,34.0,132.0,0.9944,3.36,0.48,9.6 +5.5,0.16,0.22,4.5,0.03,30.0,102.0,0.9938,3.24,0.36,9.4 +7.0,0.24,0.51,11.0,0.029,55.0,227.0,0.9965,3.03,0.61,9.5 +7.4,0.28,0.36,1.1,0.028,42.0,105.0,0.9893,2.99,0.39,12.4 +7.0,0.22,0.28,1.5,0.037,29.0,115.0,0.9927,3.11,0.55,10.5 +7.1,0.55,0.13,1.7,0.073,21.0,165.0,0.994,2.97,0.58,9.2 +6.3,0.22,0.33,1.7,0.041,67.0,164.0,0.9928,3.32,0.56,10.4 +6.7,0.47,0.34,8.9,0.043,31.0,172.0,0.9964,3.22,0.6,9.2 +5.9,0.36,0.41,1.3,0.047,45.0,104.0,0.9917,3.33,0.51,10.6 +5.8,0.25,0.24,13.3,0.044,41.0,137.0,0.9972,3.34,0.42,9.5 +6.7,0.47,0.34,8.9,0.043,31.0,172.0,0.9964,3.22,0.6,9.2 +6.2,0.37,0.3,6.6,0.346,79.0,200.0,0.9954,3.29,0.58,9.6 +6.2,0.18,0.38,1.5,0.028,36.0,117.0,0.993,3.47,0.54,9.7 +6.0,0.16,0.37,1.5,0.025,43.0,117.0,0.9928,3.46,0.51,9.7 +6.6,0.34,0.28,1.3,0.035,32.0,90.0,0.9916,3.1,0.42,10.7 +7.4,0.29,0.29,1.6,0.045,53.0,180.0,0.9936,3.34,0.68,10.5 +7.4,0.26,0.31,7.6,0.047,52.0,177.0,0.9962,3.13,0.45,8.9 +7.0,0.28,0.36,1.0,0.035,8.0,70.0,0.9899,3.09,0.46,12.1 +7.1,0.23,0.39,1.6,0.032,12.0,65.0,0.9898,3.25,0.4,12.7 +7.8,0.19,0.26,8.9,0.039,42.0,182.0,0.996,3.18,0.46,9.9 +6.3,0.19,0.28,1.8,0.022,28.0,158.0,0.9907,3.2,0.64,11.4 +6.8,0.2,0.38,4.7,0.04,27.0,103.0,0.994,3.37,0.58,10.7 +5.7,0.44,0.13,7.0,0.025,28.0,173.0,0.9913,3.33,0.48,12.5 +7.2,0.4,0.62,10.8,0.041,70.0,189.0,0.9976,3.08,0.49,8.6 +6.8,0.23,0.32,1.6,0.026,43.0,147.0,0.9904,3.29,0.54,12.5 +5.7,0.335,0.34,1.0,0.04,13.0,174.0,0.992,3.27,0.66,10.0 +7.2,0.4,0.62,10.8,0.041,70.0,189.0,0.9976,3.08,0.49,8.6 +7.2,0.28,0.54,16.7,0.045,54.0,200.0,0.999,3.08,0.49,9.5 +6.8,0.19,0.58,14.2,0.038,51.0,164.0,0.9975,3.12,0.48,9.6 +6.4,0.3,0.3,2.25,0.038,8.0,210.0,0.9937,3.2,0.62,9.9 +6.5,0.3,0.29,2.25,0.037,8.0,210.0,0.9937,3.19,0.62,9.9 +7.8,0.18,0.31,12.2,0.053,46.0,140.0,0.998,3.06,0.53,8.9 +7.8,0.18,0.31,12.2,0.053,46.0,140.0,0.998,3.06,0.53,8.9 +7.3,0.51,0.26,3.3,0.09,7.0,135.0,0.9944,3.01,0.52,8.8 +6.0,0.24,0.27,1.9,0.048,40.0,170.0,0.9938,3.64,0.54,10.0 +5.9,0.62,0.28,3.5,0.039,55.0,152.0,0.9907,3.44,0.44,12.0 +6.0,0.24,0.27,1.9,0.048,40.0,170.0,0.9938,3.64,0.54,10.0 +6.7,0.27,0.12,1.3,0.041,62.0,138.0,0.9921,3.21,0.42,10.0 +7.8,0.34,0.35,1.8,0.042,8.0,167.0,0.9908,3.11,0.41,12.1 +7.3,0.26,0.36,5.2,0.04,31.0,141.0,0.9931,3.16,0.59,11.0 +7.4,0.36,0.33,1.4,0.025,27.0,55.0,0.9915,3.21,0.33,11.2 +7.8,0.28,0.32,9.0,0.036,34.0,115.0,0.9952,3.17,0.39,10.3 +6.1,0.31,0.26,2.2,0.051,28.0,167.0,0.9926,3.37,0.47,10.4 +6.8,0.18,0.37,1.6,0.055,47.0,154.0,0.9934,3.08,0.45,9.1 +7.4,0.15,0.42,1.7,0.045,49.0,154.0,0.992,3.0,0.6,10.4 +5.9,0.13,0.28,1.9,0.05,20.0,78.0,0.9918,3.43,0.64,10.8 +7.2,0.34,0.34,12.6,0.048,7.0,41.0,0.9942,3.19,0.4,11.7 +7.9,0.19,0.26,2.1,0.039,8.0,143.0,0.9942,3.05,0.74,9.8 +7.9,0.19,0.26,2.1,0.039,8.0,143.0,0.9942,3.05,0.74,9.8 +6.9,0.25,0.4,1.3,0.038,22.0,101.0,0.9901,3.03,0.39,11.4 +5.8,0.36,0.32,1.7,0.033,22.0,96.0,0.9898,3.03,0.38,11.2 +5.6,0.35,0.37,1.0,0.038,6.0,72.0,0.9902,3.37,0.34,11.4 +5.9,0.32,0.39,3.3,0.114,24.0,140.0,0.9934,3.09,0.45,9.2 +7.2,0.31,0.46,5.0,0.04,3.0,29.0,0.9906,3.04,0.53,12.5 +6.1,0.28,0.22,1.8,0.034,32.0,116.0,0.9898,3.36,0.44,12.6 +5.2,0.36,0.02,1.6,0.031,24.0,104.0,0.9896,3.44,0.35,12.2 +5.6,0.19,0.47,4.5,0.03,19.0,112.0,0.9922,3.56,0.45,11.2 +6.4,0.1,0.35,4.9,0.048,31.0,103.0,0.9947,3.43,0.79,9.7 +6.4,0.18,0.48,4.0,0.186,64.0,150.0,0.9945,3.06,0.4,9.3 
+7.4,0.25,0.36,13.2,0.067,53.0,178.0,0.9976,3.01,0.48,9.0 +7.4,0.25,0.36,13.2,0.067,53.0,178.0,0.9976,3.01,0.48,9.0 +7.4,0.25,0.36,13.2,0.067,53.0,178.0,0.9976,3.01,0.48,9.0 +7.9,0.345,0.51,15.3,0.047,54.0,171.0,0.9987,3.09,0.51,9.1 +7.9,0.345,0.51,15.3,0.047,54.0,171.0,0.9987,3.09,0.51,9.1 +7.4,0.25,0.36,13.2,0.067,53.0,178.0,0.9976,3.01,0.48,9.0 +6.1,0.24,0.3,1.5,0.045,22.0,61.0,0.992,3.31,0.54,10.4 +6.8,0.25,0.24,4.55,0.053,41.0,211.0,0.9955,3.37,0.67,9.5 +6.7,0.31,0.31,9.9,0.04,10.0,175.0,0.9953,3.46,0.55,11.4 +7.2,0.46,0.65,10.4,0.05,76.0,192.0,0.9976,3.16,0.42,8.7 +5.5,0.35,0.35,1.1,0.045,14.0,167.0,0.992,3.34,0.68,9.9 +6.7,0.24,0.41,8.7,0.036,29.0,148.0,0.9952,3.22,0.62,9.9 +6.8,0.28,0.17,13.9,0.047,49.0,162.0,0.9983,3.21,0.51,9.0 +6.4,0.16,0.22,1.4,0.04,41.0,149.0,0.9933,3.49,0.58,10.0 +6.3,0.26,0.24,7.2,0.039,38.0,172.0,0.9958,3.49,0.64,9.7 +7.7,0.22,0.42,1.9,0.052,10.0,87.0,0.9922,3.3,0.49,11.8 +6.5,0.18,0.31,1.7,0.044,30.0,127.0,0.9928,3.49,0.5,10.2 +7.2,0.46,0.65,10.4,0.05,76.0,192.0,0.9976,3.16,0.42,8.7 +7.0,0.3,0.51,13.6,0.05,40.0,168.0,0.9976,3.07,0.52,9.6 +9.2,0.25,0.34,1.2,0.026,31.0,93.0,0.9916,2.93,0.37,11.3 +7.8,0.28,0.34,1.6,0.028,32.0,118.0,0.9901,3.0,0.38,12.1 +7.0,0.3,0.51,13.6,0.05,40.0,168.0,0.9976,3.07,0.52,9.6 +7.8,0.28,0.34,1.6,0.028,32.0,118.0,0.9901,3.0,0.38,12.1 +9.2,0.25,0.34,1.2,0.026,31.0,93.0,0.9916,2.93,0.37,11.3 +8.4,0.35,0.71,12.2,0.046,22.0,160.0,0.9982,2.98,0.65,9.4 +6.1,0.41,0.24,1.6,0.049,16.0,137.0,0.993,3.32,0.5,10.4 +5.9,0.21,0.24,12.1,0.044,53.0,165.0,0.9969,3.25,0.39,9.5 +7.2,0.34,0.44,4.2,0.047,51.0,144.0,0.991,3.01,0.76,12.3 +6.7,0.21,0.42,9.1,0.049,31.0,150.0,0.9953,3.12,0.74,9.9 +5.9,0.37,0.1,1.6,0.057,39.0,128.0,0.9924,3.24,0.48,10.1 +7.7,0.34,0.27,8.8,0.063,39.0,184.0,0.9969,3.09,0.63,9.2 +7.4,0.3,0.22,1.4,0.046,16.0,135.0,0.9928,3.08,0.77,10.4 +6.8,0.51,0.3,4.2,0.066,38.0,165.0,0.9945,3.2,0.42,9.1 +7.8,0.22,0.38,10.3,0.059,28.0,99.0,0.9967,3.12,0.47,10.0 +7.2,0.35,0.34,12.4,0.051,6.0,37.0,0.9944,3.13,0.39,11.5 +6.0,0.26,0.5,2.2,0.048,59.0,153.0,0.9928,3.08,0.61,9.8 +6.1,0.26,0.51,2.2,0.05,61.0,154.0,0.9929,3.08,0.6,9.8 +6.5,0.28,0.27,5.2,0.04,44.0,179.0,0.9948,3.19,0.69,9.4 +7.4,0.41,0.66,10.8,0.051,77.0,194.0,0.9976,3.05,0.46,8.7 +6.5,0.28,0.29,2.7,0.038,26.0,107.0,0.9912,3.32,0.41,11.6 +6.7,0.34,0.54,16.3,0.047,44.0,181.0,0.9987,3.04,0.56,8.8 +7.2,0.2,0.34,2.7,0.032,49.0,151.0,0.99,3.16,0.39,12.7 +7.4,0.2,0.33,1.9,0.035,39.0,138.0,0.991,3.17,0.44,11.7 +8.2,0.22,0.3,1.8,0.047,47.0,185.0,0.9933,3.13,0.5,10.2 +8.2,0.23,0.29,1.8,0.047,47.0,187.0,0.9933,3.13,0.5,10.2 +7.1,0.22,0.33,2.8,0.033,48.0,153.0,0.9899,3.15,0.38,12.7 +6.5,0.28,0.29,2.7,0.038,26.0,107.0,0.9912,3.32,0.41,11.6 +6.0,0.38,0.26,6.0,0.034,42.0,134.0,0.9912,3.38,0.38,12.3 +7.4,0.41,0.66,10.8,0.051,77.0,194.0,0.9976,3.05,0.46,8.7 +5.7,0.18,0.22,4.2,0.042,25.0,111.0,0.994,3.35,0.39,9.4 +7.3,0.3,0.22,6.4,0.056,44.0,168.0,0.9947,3.13,0.35,10.1 +7.4,0.24,0.22,10.7,0.042,26.0,81.0,0.9954,2.86,0.36,9.7 +6.6,0.25,0.3,1.6,0.046,32.0,134.0,0.993,3.42,0.51,10.1 +7.4,0.24,0.22,10.7,0.042,26.0,81.0,0.9954,2.86,0.36,9.7 +7.4,0.26,0.3,7.9,0.049,38.0,157.0,0.9963,3.13,0.48,8.9 +6.1,0.32,0.25,1.7,0.034,37.0,136.0,0.992,3.47,0.5,10.8 +6.9,0.28,0.27,2.1,0.036,42.0,121.0,0.9926,3.42,0.49,10.8 +7.0,0.23,0.33,5.8,0.04,25.0,136.0,0.995,3.19,0.58,9.5 +7.1,0.31,0.5,14.5,0.059,6.0,148.0,0.9983,2.94,0.44,9.1 +7.3,0.2,0.37,1.2,0.037,48.0,119.0,0.992,3.32,0.49,10.9 +6.9,0.41,0.33,10.1,0.043,28.0,152.0,0.9968,3.2,0.52,9.4 +6.4,0.45,0.07,1.1,0.03,10.0,131.0,0.9905,2.97,0.28,10.8 
+6.4,0.475,0.06,1.0,0.03,9.0,131.0,0.9904,2.97,0.29,10.8 +6.3,0.27,0.38,0.9,0.051,7.0,140.0,0.9926,3.45,0.5,10.5 +6.9,0.41,0.33,10.1,0.043,28.0,152.0,0.9968,3.2,0.52,9.4 +7.0,0.29,0.37,4.9,0.034,26.0,127.0,0.9928,3.17,0.44,10.8 +5.9,0.27,0.29,11.4,0.036,31.0,115.0,0.9949,3.35,0.48,10.5 +6.9,0.19,0.4,1.4,0.036,14.0,55.0,0.9909,3.08,0.68,11.5 +6.7,0.3,0.35,1.4,0.18,36.0,160.0,0.9937,3.11,0.54,9.4 +7.2,0.24,0.4,1.4,0.045,31.0,106.0,0.9914,2.88,0.38,10.8 +6.4,0.45,0.07,1.1,0.03,10.0,131.0,0.9905,2.97,0.28,10.8 +6.4,0.475,0.06,1.0,0.03,9.0,131.0,0.9904,2.97,0.29,10.8 +6.3,0.26,0.49,1.5,0.052,34.0,134.0,0.9924,2.99,0.61,9.8 +6.3,0.26,0.49,1.5,0.052,34.0,134.0,0.9924,2.99,0.61,9.8 +7.3,0.25,0.29,7.5,0.049,38.0,158.0,0.9965,3.43,0.38,9.6 +7.3,0.25,0.29,7.5,0.049,38.0,158.0,0.9965,3.43,0.38,9.6 +6.1,0.28,0.25,17.75,0.044,48.0,161.0,0.9993,3.34,0.48,9.5 +7.4,0.37,0.35,5.7,0.061,12.0,94.0,0.9965,3.48,0.69,10.7 +6.5,0.36,0.28,3.2,0.037,29.0,119.0,0.9908,3.25,0.65,12.4 +7.4,0.24,0.4,4.3,0.032,9.0,95.0,0.992,3.09,0.39,11.1 +7.5,0.23,0.68,11.0,0.047,37.0,133.0,0.9978,2.99,0.38,8.8 +7.5,0.21,0.68,10.9,0.045,38.0,133.0,0.9978,3.0,0.36,8.7 +7.5,0.21,0.68,10.9,0.045,38.0,133.0,0.9978,3.0,0.36,8.7 +7.5,0.23,0.68,11.0,0.047,37.0,133.0,0.9978,2.99,0.38,8.8 +7.8,0.32,0.33,2.4,0.037,18.0,101.0,0.9912,3.21,0.65,11.7 +7.8,0.26,0.27,1.9,0.051,52.0,195.0,0.9928,3.23,0.5,10.9 +7.7,0.24,0.27,1.8,0.051,52.0,190.0,0.9928,3.23,0.5,10.8 +7.4,0.19,0.3,1.4,0.057,33.0,135.0,0.993,3.12,0.5,9.6 +6.5,0.46,0.41,16.8,0.084,59.0,222.0,0.9993,3.18,0.58,9.0 +6.5,0.26,0.43,8.9,0.083,50.0,171.0,0.9965,2.85,0.5,9.0 +5.3,0.32,0.12,6.6,0.043,22.0,141.0,0.9937,3.36,0.6,10.4 +7.2,0.24,0.34,1.1,0.045,3.0,64.0,0.9913,3.23,0.51,11.4 +6.0,0.36,0.06,1.4,0.066,27.0,128.0,0.9934,3.26,0.55,9.3 +6.2,0.24,0.29,13.3,0.039,49.0,130.0,0.9952,3.33,0.46,11.0 +7.6,0.56,0.12,10.4,0.096,22.0,177.0,0.9983,3.32,0.45,9.1 +7.0,0.32,0.24,6.2,0.048,31.0,228.0,0.9957,3.23,0.62,9.4 +7.0,0.32,0.24,6.2,0.048,31.0,228.0,0.9957,3.23,0.62,9.4 +5.8,0.31,0.33,1.2,0.036,23.0,99.0,0.9916,3.18,0.6,10.5 +7.0,0.23,0.42,18.05,0.05,35.0,144.0,0.9999,3.22,0.42,8.8 +7.0,0.23,0.42,18.05,0.05,35.0,144.0,0.9999,3.22,0.42,8.8 +6.9,0.24,0.33,4.8,0.04,16.0,131.0,0.9936,3.26,0.64,10.7 +6.0,0.29,0.2,12.6,0.045,45.0,187.0,0.9972,3.33,0.42,9.5 +6.1,0.17,0.28,4.5,0.033,46.0,150.0,0.9933,3.43,0.49,10.9 +5.9,0.14,0.25,4.5,0.027,34.0,140.0,0.9934,3.49,0.51,10.8 +6.2,0.17,0.28,4.7,0.037,39.0,133.0,0.9931,3.41,0.46,10.8 +7.4,0.28,0.25,11.9,0.053,25.0,148.0,0.9976,3.1,0.62,9.2 +5.6,0.35,0.14,5.0,0.046,48.0,198.0,0.9937,3.3,0.71,10.3 +5.8,0.335,0.14,5.8,0.046,49.0,197.0,0.9937,3.3,0.71,10.3 +5.6,0.235,0.29,1.2,0.047,33.0,127.0,0.991,3.34,0.5,11.0 +6.1,0.28,0.25,12.9,0.054,34.0,189.0,0.9979,3.25,0.43,9.0 +6.3,0.21,0.33,13.9,0.046,68.0,179.0,0.9971,3.36,0.5,10.4 +6.4,0.24,0.28,11.5,0.05,34.0,163.0,0.9969,3.31,0.45,9.5 +6.4,0.24,0.29,11.4,0.051,32.0,166.0,0.9968,3.31,0.45,9.5 +6.3,0.26,0.25,7.8,0.058,44.0,166.0,0.9961,3.24,0.41,9.0 +6.5,0.33,0.72,1.1,0.061,7.0,151.0,0.993,3.09,0.57,9.5 +7.4,0.105,0.34,12.2,0.05,57.0,146.0,0.9973,3.16,0.37,9.0 +6.0,0.32,0.12,5.9,0.041,34.0,190.0,0.9944,3.16,0.72,10.0 +7.1,0.26,0.34,14.4,0.067,35.0,189.0,0.9986,3.07,0.53,9.1 +7.1,0.26,0.34,14.4,0.067,35.0,189.0,0.9986,3.07,0.53,9.1 +7.1,0.26,0.34,14.4,0.067,35.0,189.0,0.9986,3.07,0.53,9.1 +7.1,0.26,0.34,14.4,0.067,35.0,189.0,0.9986,3.07,0.53,9.1 +5.9,0.24,0.26,12.3,0.053,34.0,134.0,0.9972,3.34,0.45,9.5 +6.5,0.21,0.37,2.5,0.048,70.0,138.0,0.9917,3.33,0.75,11.4 +7.7,0.27,0.35,5.3,0.03,30.0,117.0,0.992,3.11,0.42,12.2 
+9.0,0.27,0.35,4.9,0.028,27.0,95.0,0.9932,3.04,0.4,11.3 +7.3,0.34,0.21,3.2,0.05,14.0,136.0,0.9936,3.25,0.44,10.2 +6.6,0.27,0.25,3.1,0.052,41.0,188.0,0.9915,3.24,0.4,11.3 +6.8,0.29,0.16,1.4,0.038,122.5,234.5,0.9922,3.15,0.47,10.0 +7.1,0.28,0.26,1.9,0.049,12.0,86.0,0.9934,3.15,0.38,9.4 +6.8,0.25,0.34,14.0,0.032,47.0,133.0,0.9952,3.37,0.5,12.2 +7.0,0.57,0.1,8.3,0.094,23.0,188.0,0.9972,3.4,0.47,9.2 +7.1,0.28,0.26,1.9,0.049,12.0,86.0,0.9934,3.15,0.38,9.4 +7.1,0.17,0.38,7.4,0.052,49.0,182.0,0.9958,3.35,0.52,9.6 +7.8,0.28,0.22,1.4,0.056,24.0,130.0,0.9944,3.28,0.48,9.5 +6.8,0.22,0.37,1.7,0.036,38.0,195.0,0.9908,3.35,0.72,12.5 +7.1,0.17,0.38,7.4,0.052,49.0,182.0,0.9958,3.35,0.52,9.6 +6.1,0.14,0.25,1.3,0.047,37.0,173.0,0.9925,3.35,0.46,10.0 +6.4,0.24,0.5,11.6,0.047,60.0,211.0,0.9966,3.18,0.57,9.3 +7.8,0.42,0.26,9.2,0.058,34.0,199.0,0.9972,3.14,0.55,9.3 +6.6,0.28,0.36,1.7,0.038,22.0,101.0,0.9912,3.29,0.57,11.6 +7.1,0.32,0.34,14.5,0.039,46.0,150.0,0.995,3.38,0.5,12.5 +6.7,0.31,0.3,2.1,0.038,18.0,130.0,0.9928,3.36,0.63,10.6 +6.4,0.32,0.5,10.7,0.047,57.0,206.0,0.9968,3.08,0.6,9.4 +6.1,0.28,0.25,6.9,0.056,44.0,201.0,0.9955,3.19,0.4,9.1 +5.9,0.29,0.25,12.0,0.057,48.0,224.0,0.9981,3.23,0.41,9.0 +5.8,0.32,0.38,4.75,0.033,23.0,94.0,0.991,3.42,0.42,11.8 +5.8,0.32,0.38,4.75,0.033,23.0,94.0,0.991,3.42,0.42,11.8 +5.7,0.32,0.38,4.75,0.033,23.0,94.0,0.991,3.42,0.42,11.8 +6.7,0.28,0.14,1.4,0.043,64.0,159.0,0.992,3.17,0.39,10.0 +6.8,0.34,0.69,1.3,0.058,12.0,171.0,0.9931,3.06,0.47,9.7 +5.9,0.25,0.25,11.3,0.052,30.0,165.0,0.997,3.24,0.44,9.5 +6.4,0.27,0.32,4.5,0.24,61.0,174.0,0.9948,3.12,0.48,9.4 +8.1,0.46,0.31,1.7,0.052,50.0,183.0,0.9923,3.03,0.42,11.2 +6.2,0.36,0.26,13.2,0.051,54.0,201.0,0.9976,3.25,0.46,9.0 +6.8,0.22,0.35,5.5,0.043,21.0,114.0,0.9938,3.3,0.53,10.7 +6.8,0.67,0.3,13.0,0.29,22.0,193.0,0.9984,3.08,0.67,9.0 +7.2,0.28,0.3,10.7,0.044,61.0,222.0,0.9972,3.14,0.5,9.1 +6.7,0.17,0.37,2.0,0.039,34.0,125.0,0.9922,3.26,0.6,10.8 +6.9,0.2,0.34,1.9,0.043,25.0,136.0,0.9935,3.31,0.6,10.1 +6.1,0.36,0.16,6.4,0.037,36.0,198.0,0.9944,3.17,0.62,9.9 +6.0,0.36,0.16,6.3,0.036,36.0,191.0,0.9942,3.17,0.62,9.8 +5.9,0.37,0.14,6.3,0.036,34.0,185.0,0.9944,3.17,0.63,9.8 +7.6,0.29,0.58,17.5,0.041,51.0,225.0,0.9997,3.16,0.66,9.5 +6.3,0.34,0.28,14.7,0.047,49.0,198.0,0.9977,3.23,0.46,9.5 +6.7,0.19,0.34,1.0,0.022,22.0,94.0,0.9912,3.23,0.57,11.1 +7.5,0.31,0.51,14.8,0.039,62.0,204.0,0.9982,3.06,0.6,9.5 +7.5,0.31,0.51,14.8,0.039,62.0,204.0,0.9982,3.06,0.6,9.5 +7.4,0.31,0.48,14.2,0.042,62.0,204.0,0.9983,3.06,0.59,9.4 +8.4,0.4,0.7,13.1,0.042,29.0,197.0,0.998,3.06,0.64,9.7 +5.9,0.34,0.22,2.4,0.03,19.0,135.0,0.9894,3.41,0.78,13.9 +6.6,0.38,0.18,1.2,0.042,20.0,84.0,0.9927,3.22,0.45,10.1 +6.4,0.33,0.28,1.1,0.038,30.0,110.0,0.9917,3.12,0.42,10.5 +5.6,0.25,0.26,3.6,0.037,18.0,115.0,0.9904,3.42,0.5,12.6 +8.6,0.27,0.46,6.1,0.032,13.0,41.0,0.993,2.89,0.34,10.9 +6.2,0.31,0.21,6.3,0.041,50.0,218.0,0.9941,3.15,0.6,10.0 +7.2,0.18,0.45,4.4,0.046,57.0,166.0,0.9943,3.13,0.62,11.2 +7.7,0.2,0.44,13.9,0.05,44.0,130.0,0.99855,3.11,0.48,10.0 +6.2,0.47,0.21,1.0,0.044,13.0,98.0,0.99345,3.14,0.46,9.2 +6.1,0.25,0.24,12.1,0.046,51.0,172.0,0.998,3.35,0.45,9.5 +8.2,0.27,0.43,1.6,0.035,31.0,128.0,0.9916,3.1,0.5,12.3 +8.2,0.27,0.43,1.6,0.035,31.0,128.0,0.9916,3.1,0.5,12.3 +6.4,0.31,0.39,7.5,0.04,57.0,213.0,0.99475,3.32,0.43,10.0 +6.0,0.39,0.26,2.7,0.038,39.0,187.0,0.99325,3.41,0.5,10.8 +6.2,0.21,0.27,1.7,0.038,41.0,150.0,0.9933,3.49,0.71,10.5 +7.7,0.42,0.31,9.2,0.048,22.0,221.0,0.9969,3.06,0.61,9.2 +7.0,0.27,0.41,18.75,0.042,34.0,157.0,1.0002,2.96,0.5,9.1 
+6.2,0.21,0.27,1.7,0.038,41.0,150.0,0.9933,3.49,0.71,10.5 +7.4,0.29,0.5,1.8,0.042,35.0,127.0,0.9937,3.45,0.5,10.2 +6.6,0.29,0.44,9.0,0.053,62.0,178.0,0.99685,3.02,0.45,8.9 +6.0,0.3,0.44,1.5,0.046,15.0,182.0,0.99455,3.5,0.52,10.4 +6.9,0.31,0.34,1.6,0.032,23.0,128.0,0.9917,3.37,0.47,11.7 +6.6,0.33,0.31,1.3,0.02,29.0,89.0,0.99035,3.26,0.44,12.4 +7.8,0.3,0.4,1.8,0.028,23.0,122.0,0.9914,3.14,0.39,10.9 +6.4,0.39,0.21,1.2,0.041,35.0,136.0,0.99225,3.15,0.46,10.2 +6.4,0.24,0.31,2.8,0.038,41.0,114.0,0.99155,3.37,0.66,11.7 +7.0,0.21,0.34,8.0,0.057,19.0,101.0,0.9954,2.99,0.59,9.4 +6.4,0.16,0.31,5.3,0.043,42.0,157.0,0.99455,3.35,0.47,10.5 +6.0,0.33,0.27,0.8,0.185,12.0,188.0,0.9924,3.12,0.62,9.4 +6.5,0.23,0.33,13.8,0.042,25.0,139.0,0.99695,3.35,0.56,10.4 +6.2,0.25,0.48,10.0,0.044,78.0,240.0,0.99655,3.25,0.47,9.5 +8.8,0.28,0.45,6.0,0.022,14.0,49.0,0.9934,3.01,0.33,11.1 +6.6,0.25,0.3,14.4,0.052,40.0,183.0,0.998,3.02,0.5,9.1 +6.9,0.38,0.25,9.8,0.04,28.0,191.0,0.9971,3.28,0.61,9.2 +6.4,0.25,0.3,5.5,0.038,15.0,129.0,0.9948,3.14,0.49,9.6 +6.6,0.25,0.3,14.4,0.052,40.0,183.0,0.998,3.02,0.5,9.1 +6.9,0.38,0.25,9.8,0.04,28.0,191.0,0.9971,3.28,0.61,9.2 +7.1,0.21,0.31,3.8,0.021,40.0,142.0,0.99215,3.17,0.39,10.8 +6.4,0.25,0.3,5.5,0.038,15.0,129.0,0.9948,3.14,0.49,9.6 +6.9,0.39,0.4,4.6,0.022,5.0,19.0,0.9915,3.31,0.37,12.6 +5.8,0.2,0.3,1.5,0.031,21.0,57.0,0.99115,3.44,0.55,11.0 +7.0,0.2,0.37,2.0,0.03,26.0,136.0,0.9932,3.28,0.61,10.2 +5.9,0.26,0.25,12.5,0.034,38.0,152.0,0.9977,3.33,0.43,9.4 +7.4,0.38,0.27,7.5,0.041,24.0,160.0,0.99535,3.17,0.43,10.0 +7.4,0.2,1.66,2.1,0.022,34.0,113.0,0.99165,3.26,0.55,12.2 +7.0,0.21,0.34,8.5,0.033,31.0,253.0,0.9953,3.22,0.56,10.5 +7.2,0.29,0.4,7.6,0.024,56.0,177.0,0.9928,3.04,0.32,11.5 +6.9,0.18,0.38,8.1,0.049,44.0,176.0,0.9958,3.3,0.54,9.8 +7.3,0.3,0.42,7.35,0.025,51.0,175.0,0.9928,3.04,0.32,11.4 +7.2,0.29,0.4,7.6,0.024,56.0,177.0,0.9928,3.04,0.32,11.5 +6.9,0.2,0.5,10.0,0.036,78.0,167.0,0.9964,3.15,0.55,10.2 +6.7,0.2,0.42,14.0,0.038,83.0,160.0,0.9987,3.16,0.5,9.4 +7.0,0.21,0.34,8.5,0.033,31.0,253.0,0.9953,3.22,0.56,10.5 +5.9,0.35,0.47,2.2,0.11,14.0,138.0,0.9932,3.09,0.5,9.1 +7.1,0.28,0.44,1.8,0.032,32.0,107.0,0.9907,3.25,0.48,12.2 +5.8,0.25,0.28,11.1,0.056,45.0,175.0,0.99755,3.42,0.43,9.5 +6.8,0.22,0.37,15.2,0.051,68.0,178.0,0.99935,3.4,0.85,9.3 +7.1,0.14,0.4,1.2,0.051,55.0,136.0,0.9932,3.3,0.96,9.8 +7.1,0.13,0.4,1.2,0.047,54.0,134.0,0.9932,3.3,0.97,9.8 +6.9,0.18,0.38,8.1,0.049,44.0,176.0,0.9958,3.3,0.54,9.8 +7.0,0.2,0.38,8.1,0.05,42.0,173.0,0.99585,3.3,0.54,9.8 +6.8,0.24,0.49,19.3,0.057,55.0,247.0,1.00055,3.0,0.56,8.7 +5.0,0.44,0.04,18.6,0.039,38.0,128.0,0.9985,3.37,0.57,10.2 +6.3,0.3,0.28,5.0,0.042,36.0,168.0,0.99505,3.22,0.69,9.5 +7.2,0.27,0.42,1.6,0.05,35.0,135.0,0.992,2.94,0.46,11.0 +6.7,0.5,0.63,13.4,0.078,81.0,238.0,0.9988,3.08,0.44,9.2 +6.8,0.2,0.36,1.6,0.028,7.0,46.0,0.99175,3.21,0.6,10.9 +6.7,0.11,0.34,8.8,0.043,41.0,113.0,0.9962,3.42,0.4,9.3 +6.7,0.11,0.34,8.8,0.043,41.0,113.0,0.9962,3.42,0.4,9.3 +6.8,0.12,0.31,5.2,0.045,29.0,120.0,0.9942,3.41,0.46,9.8 +6.6,0.16,0.57,1.1,0.13,58.0,140.0,0.9927,3.12,0.39,9.3 +6.6,0.21,0.6,1.1,0.135,61.0,144.0,0.9927,3.12,0.39,9.3 +6.1,0.27,0.3,16.7,0.039,49.0,172.0,0.99985,3.4,0.45,9.4 +9.1,0.27,0.45,10.6,0.035,28.0,124.0,0.997,3.2,0.46,10.4 +6.4,0.225,0.48,2.2,0.115,29.0,104.0,0.9918,3.24,0.58,12.1 +8.3,0.14,0.45,1.5,0.039,18.0,98.0,0.99215,3.02,0.56,11.0 +7.2,0.23,0.19,13.7,0.052,47.0,197.0,0.99865,3.12,0.53,9.0 +6.9,0.22,0.37,15.0,0.053,59.0,178.0,0.9992,3.37,0.82,9.5 +8.1,0.17,0.44,14.1,0.053,43.0,145.0,1.0006,3.28,0.75,8.8 
+6.0,0.395,0.0,1.4,0.042,7.0,55.0,0.99135,3.37,0.38,11.2 +7.8,0.29,0.22,9.5,0.056,44.0,213.0,0.99715,3.08,0.61,9.3 +6.9,0.22,0.37,15.0,0.053,59.0,178.0,0.9992,3.37,0.82,9.5 +8.1,0.17,0.44,14.1,0.053,43.0,145.0,1.0006,3.28,0.75,8.8 +7.2,0.23,0.19,13.7,0.052,47.0,197.0,0.99865,3.12,0.53,9.0 +7.6,0.3,0.27,10.6,0.039,31.0,119.0,0.99815,3.27,0.3,9.3 +7.7,0.34,0.28,11.0,0.04,31.0,117.0,0.99815,3.27,0.29,9.2 +7.7,0.34,0.28,11.0,0.04,31.0,117.0,0.99815,3.27,0.29,9.2 +5.8,0.34,0.16,7.0,0.037,26.0,116.0,0.9949,3.46,0.45,10.0 +7.6,0.3,0.27,10.6,0.039,31.0,119.0,0.99815,3.27,0.3,9.3 +7.7,0.34,0.28,11.0,0.04,31.0,117.0,0.99815,3.27,0.29,9.2 +5.9,0.24,0.3,2.0,0.033,28.0,92.0,0.99225,3.39,0.69,10.9 +6.4,0.46,0.08,4.9,0.046,34.0,144.0,0.99445,3.1,0.56,10.0 +5.9,0.24,0.3,2.0,0.033,28.0,92.0,0.99225,3.39,0.69,10.9 +7.4,0.32,0.27,1.4,0.049,38.0,173.0,0.99335,3.03,0.52,9.3 +7.2,0.31,0.26,7.3,0.05,37.0,157.0,0.99625,3.09,0.43,9.0 +7.8,0.42,0.23,8.8,0.054,42.0,215.0,0.9971,3.02,0.58,9.2 +6.9,0.24,0.33,12.5,0.046,47.0,153.0,0.9983,3.28,0.77,9.6 +5.4,0.18,0.24,4.8,0.041,30.0,113.0,0.99445,3.42,0.4,9.4 +6.0,0.18,0.31,1.4,0.036,14.0,75.0,0.99085,3.34,0.58,11.1 +7.8,0.27,0.58,11.2,0.036,44.0,161.0,0.9977,3.06,0.41,8.9 +6.0,0.28,0.49,6.8,0.048,61.0,222.0,0.9953,3.19,0.47,9.3 +6.8,0.39,0.35,11.6,0.044,57.0,220.0,0.99775,3.07,0.53,9.3 +6.6,0.21,0.31,11.4,0.039,46.0,165.0,0.99795,3.41,0.44,9.8 +7.3,0.32,0.34,6.6,0.032,24.0,112.0,0.99505,3.22,0.46,9.8 +7.8,0.27,0.58,11.2,0.036,44.0,161.0,0.9977,3.06,0.41,8.9 +6.4,0.31,0.26,13.2,0.046,57.0,205.0,0.9975,3.17,0.41,9.6 +6.2,0.29,0.26,13.1,0.046,55.0,204.0,0.99745,3.16,0.41,9.6 +6.0,0.39,0.17,12.0,0.046,65.0,246.0,0.9976,3.15,0.38,9.0 +6.2,0.3,0.26,13.4,0.046,57.0,206.0,0.99775,3.17,0.43,9.5 +6.0,0.28,0.49,6.8,0.048,61.0,222.0,0.9953,3.19,0.47,9.3 +6.0,0.41,0.05,1.5,0.063,17.0,120.0,0.9932,3.21,0.56,9.2 +6.4,0.35,0.28,1.1,0.055,9.0,160.0,0.99405,3.42,0.5,9.1 +6.5,0.26,0.32,16.5,0.045,44.0,166.0,1.0,3.38,0.46,9.5 +7.9,0.35,0.24,15.6,0.072,44.0,229.0,0.99785,3.03,0.59,10.5 +6.2,0.3,0.17,2.8,0.04,24.0,125.0,0.9939,3.01,0.46,9.0 +8.4,0.18,0.42,5.1,0.036,7.0,77.0,0.9939,3.16,0.52,11.7 +6.6,0.56,0.22,8.9,0.034,27.0,133.0,0.99675,3.2,0.51,9.1 +6.2,0.3,0.17,2.8,0.04,24.0,125.0,0.9939,3.01,0.46,9.0 +6.6,0.56,0.22,8.9,0.034,27.0,133.0,0.99675,3.2,0.51,9.1 +6.6,0.36,0.29,1.6,0.021,24.0,85.0,0.98965,3.41,0.61,12.4 +7.3,0.655,0.2,10.2,0.071,28.0,212.0,0.9971,2.96,0.58,9.2 +6.8,0.18,0.21,5.4,0.053,34.0,104.0,0.99445,3.3,0.43,9.4 +6.7,0.19,0.23,6.2,0.047,36.0,117.0,0.9945,3.34,0.43,9.6 +8.4,0.18,0.42,5.1,0.036,7.0,77.0,0.9939,3.16,0.52,11.7 +7.0,0.21,0.37,7.2,0.042,36.0,167.0,0.9958,3.26,0.56,9.8 +6.8,0.25,0.38,8.1,0.046,24.0,155.0,0.9956,3.33,0.59,10.2 +7.4,0.24,0.36,2.0,0.031,27.0,139.0,0.99055,3.28,0.48,12.5 +7.1,0.16,0.36,10.7,0.044,20.0,90.0,0.9959,3.16,0.44,10.9 +7.1,0.16,0.36,1.2,0.043,21.0,90.0,0.9925,3.16,0.42,11.0 +7.3,0.205,0.31,1.7,0.06,34.0,110.0,0.9963,3.72,0.69,10.5 +7.4,0.17,0.4,5.5,0.037,34.0,161.0,0.9935,3.05,0.62,11.5 +7.3,0.3,0.34,2.7,0.044,34.0,108.0,0.99105,3.36,0.53,12.8 +6.9,0.25,0.34,1.3,0.035,27.0,82.0,0.99045,3.18,0.44,12.2 +7.3,0.205,0.31,1.7,0.06,34.0,110.0,0.9963,3.72,0.69,10.5 +7.5,0.42,0.34,4.3,0.04,34.0,108.0,0.99155,3.14,0.45,12.8 +7.3,0.25,0.36,2.1,0.034,30.0,177.0,0.99085,3.25,0.4,11.9 +7.3,0.25,0.36,2.1,0.034,30.0,177.0,0.99085,3.25,0.4,11.9 +7.3,0.25,0.36,2.1,0.034,30.0,177.0,0.99085,3.25,0.4,11.9 +7.5,0.34,0.35,6.0,0.034,12.0,126.0,0.9924,3.16,0.39,12.0 +7.6,0.33,0.35,6.3,0.036,12.0,126.0,0.9924,3.16,0.39,12.0 
+8.7,0.23,0.32,13.4,0.044,35.0,169.0,0.99975,3.12,0.47,8.8 +8.7,0.23,0.32,13.4,0.044,35.0,169.0,0.99975,3.12,0.47,8.8 +6.9,0.19,0.35,1.7,0.036,33.0,101.0,0.99315,3.21,0.54,10.8 +7.3,0.21,0.29,1.6,0.034,29.0,118.0,0.9917,3.3,0.5,11.0 +7.3,0.21,0.29,1.6,0.034,29.0,118.0,0.9917,3.3,0.5,11.0 +6.6,0.22,0.37,15.4,0.035,62.0,153.0,0.99845,3.02,0.4,9.3 +9.2,0.34,0.27,1.2,0.026,17.0,73.0,0.9921,3.08,0.39,10.8 +8.7,0.23,0.32,13.4,0.044,35.0,169.0,0.99975,3.12,0.47,8.8 +6.0,0.2,0.24,1.8,0.03,30.0,105.0,0.9909,3.31,0.47,11.5 +6.9,0.19,0.35,1.7,0.036,33.0,101.0,0.99315,3.21,0.54,10.8 +8.2,0.38,0.49,13.6,0.042,58.0,166.0,0.99855,3.1,0.54,9.4 +6.9,0.18,0.36,1.3,0.036,40.0,117.0,0.9934,3.27,0.95,9.5 +7.7,0.34,0.58,11.1,0.039,41.0,151.0,0.9978,3.06,0.49,8.6 +6.9,0.18,0.36,1.3,0.036,40.0,117.0,0.9934,3.27,0.95,9.5 +7.4,0.2,0.35,2.1,0.038,30.0,116.0,0.9949,3.49,0.77,10.3 +8.2,0.38,0.49,13.6,0.042,58.0,166.0,0.99855,3.1,0.54,9.4 +8.2,0.4,0.48,13.7,0.042,59.0,169.0,0.9986,3.1,0.52,9.4 +6.7,0.22,0.39,10.2,0.038,60.0,149.0,0.99725,3.17,0.54,10.0 +6.6,0.3,0.3,4.8,0.17,60.0,166.0,0.9946,3.18,0.47,9.4 +8.1,0.27,0.35,1.7,0.03,38.0,103.0,0.99255,3.22,0.63,10.4 +7.3,0.25,0.42,14.2,0.041,57.0,182.0,0.9996,3.29,0.75,9.1 +4.8,0.34,0.0,6.5,0.028,33.0,163.0,0.9939,3.36,0.61,9.9 +6.2,0.28,0.33,1.7,0.029,24.0,111.0,0.99,3.24,0.5,12.1 +4.8,0.33,0.0,6.5,0.028,34.0,163.0,0.9937,3.35,0.61,9.9 +6.1,0.27,0.33,2.2,0.021,26.0,117.0,0.9886,3.12,0.3,12.5 +6.9,0.18,0.36,1.3,0.036,40.0,117.0,0.9934,3.27,0.95,9.5 +7.8,0.18,0.46,12.6,0.042,41.0,143.0,1.0,3.24,0.76,8.5 +7.3,0.28,0.42,14.4,0.04,49.0,173.0,0.9994,3.28,0.82,9.0 +7.3,0.24,0.29,1.2,0.037,37.0,97.0,0.9926,3.19,0.7,10.1 +6.0,0.45,0.65,9.7,0.08,11.0,159.0,0.9956,3.04,0.48,9.4 +7.7,0.34,0.58,11.1,0.039,41.0,151.0,0.9978,3.06,0.49,8.6 +6.3,0.26,0.21,4.0,0.03,24.0,125.0,0.9915,3.06,0.34,10.7 +10.3,0.17,0.47,1.4,0.037,5.0,33.0,0.9939,2.89,0.28,9.6 +7.7,0.15,0.29,1.3,0.029,10.0,64.0,0.9932,3.35,0.39,10.1 +7.1,0.21,0.32,2.2,0.037,28.0,141.0,0.993,3.2,0.57,10.0 +6.9,0.36,0.34,4.2,0.018,57.0,119.0,0.9898,3.28,0.36,12.7 +6.0,0.28,0.34,1.6,0.119,33.0,104.0,0.9921,3.19,0.38,10.2 +6.2,0.16,0.54,1.4,0.126,37.0,110.0,0.9932,3.23,0.37,8.9 +6.9,0.12,0.36,2.2,0.037,18.0,111.0,0.9919,3.41,0.82,11.9 +7.1,0.21,0.32,2.2,0.037,28.0,141.0,0.993,3.2,0.57,10.0 +8.8,0.36,0.44,1.9,0.04,9.0,121.0,0.9953,3.19,0.48,9.9 +7.4,0.26,0.43,6.0,0.022,22.0,125.0,0.9928,3.13,0.55,11.5 +7.4,0.26,0.43,6.0,0.022,22.0,125.0,0.9928,3.13,0.55,11.5 +6.8,0.23,0.29,12.2,0.035,38.0,236.0,0.9976,3.35,0.52,9.8 +6.1,0.34,0.27,2.6,0.024,20.0,105.0,0.9906,3.4,0.67,12.2 +7.3,0.26,0.31,1.6,0.04,39.0,173.0,0.9918,3.19,0.51,11.4 +6.5,0.3,0.32,2.0,0.044,34.0,90.0,0.99185,3.37,0.68,11.0 +7.3,0.26,0.31,1.6,0.04,39.0,173.0,0.9918,3.19,0.51,11.4 +6.5,0.3,0.32,2.0,0.044,34.0,90.0,0.99185,3.37,0.68,11.0 +5.0,0.31,0.0,6.4,0.046,43.0,166.0,0.994,3.3,0.63,9.9 +5.8,0.26,0.18,1.2,0.031,40.0,114.0,0.9908,3.42,0.4,11.0 +5.9,0.26,0.3,1.0,0.036,38.0,114.0,0.9928,3.58,0.48,9.4 +7.0,0.31,0.29,1.4,0.037,33.0,128.0,0.9896,3.12,0.36,12.2 +5.8,0.26,0.18,1.2,0.031,40.0,114.0,0.9908,3.42,0.4,11.0 +5.6,0.19,0.39,1.1,0.043,17.0,67.0,0.9918,3.23,0.53,10.3 +6.8,0.18,0.28,8.7,0.047,52.0,242.0,0.9952,3.22,0.53,10.5 +7.0,0.29,0.26,1.6,0.044,12.0,87.0,0.9923,3.08,0.46,10.5 +6.6,0.26,0.29,1.4,0.039,13.0,67.0,0.9915,3.05,0.49,10.9 +6.8,0.18,0.28,8.5,0.047,52.0,242.0,0.9952,3.22,0.53,10.5 +6.6,0.2,0.38,7.9,0.052,30.0,145.0,0.9947,3.32,0.56,11.0 +8.0,0.29,0.29,13.2,0.046,26.0,113.0,0.9983,3.25,0.37,9.7 +6.1,0.28,0.35,12.8,0.048,63.0,229.0,0.9975,3.08,0.4,8.9 
+5.9,0.31,0.3,7.7,0.047,60.0,206.0,0.995,3.2,0.39,9.6 +6.9,0.21,0.28,2.4,0.056,49.0,159.0,0.9944,3.02,0.47,8.8 +8.4,0.19,0.42,1.6,0.047,9.0,101.0,0.994,3.06,0.65,11.1 +8.3,0.27,0.45,1.3,0.048,8.0,72.0,0.9944,3.08,0.61,10.3 +7.1,0.25,0.39,2.1,0.036,30.0,124.0,0.9908,3.28,0.43,12.2 +8.0,0.23,0.37,9.6,0.054,23.0,159.0,0.99795,3.32,0.47,9.8 +7.5,0.24,0.31,13.0,0.049,46.0,217.0,0.9985,3.08,0.53,8.8 +6.3,0.33,0.2,5.8,0.04,24.0,144.0,0.99425,3.15,0.63,9.9 +6.2,0.33,0.19,5.6,0.042,22.0,143.0,0.99425,3.15,0.63,9.9 +6.3,0.34,0.19,5.8,0.041,22.0,145.0,0.9943,3.15,0.63,9.9 +5.8,0.29,0.05,0.8,0.038,11.0,30.0,0.9924,3.36,0.35,9.2 +8.0,0.32,0.26,1.2,0.05,11.5,88.0,0.9946,3.24,0.37,9.5 +5.6,0.29,0.05,0.8,0.038,11.0,30.0,0.9924,3.36,0.35,9.2 +7.4,0.13,0.39,4.7,0.042,36.0,137.0,0.995,3.36,0.56,10.3 +7.7,0.3,0.32,1.6,0.037,23.0,124.0,0.9919,2.93,0.33,11.0 +7.0,0.24,0.34,1.4,0.031,27.0,107.0,0.99,3.06,0.39,11.9 +8.6,0.18,0.4,1.1,0.04,20.0,107.0,0.9923,2.94,0.32,10.2 +7.0,0.11,0.32,4.6,0.057,59.0,144.0,0.9956,3.55,0.44,9.4 +7.7,0.32,0.62,10.6,0.036,56.0,153.0,0.9978,3.13,0.44,8.9 +7.7,0.32,0.62,10.6,0.036,56.0,153.0,0.9978,3.13,0.44,8.9 +6.5,0.26,0.27,12.9,0.044,69.0,215.0,0.9967,3.17,0.43,10.0 +7.9,0.28,0.41,2.0,0.044,50.0,152.0,0.9934,3.45,0.49,10.7 +6.3,0.27,0.23,2.9,0.047,13.0,100.0,0.9936,3.28,0.43,9.8 +5.4,0.595,0.1,2.8,0.042,26.0,80.0,0.9932,3.36,0.38,9.3 +6.7,0.25,0.33,2.9,0.057,52.0,173.0,0.9934,3.02,0.48,9.5 +6.5,0.25,0.35,12.0,0.055,47.0,179.0,0.998,3.58,0.47,10.0 +6.1,0.36,0.58,15.0,0.044,42.0,115.0,0.9978,3.15,0.51,9.0 +7.7,0.17,0.52,5.9,0.017,21.0,84.0,0.9929,3.14,0.4,11.9 +6.4,0.26,0.43,12.6,0.033,64.0,230.0,0.9974,3.08,0.38,8.9 +6.5,0.26,0.28,12.5,0.046,80.0,225.0,0.99685,3.18,0.41,10.0 +5.9,0.29,0.33,7.4,0.037,58.0,205.0,0.99495,3.26,0.41,9.6 +6.2,0.28,0.43,13.0,0.039,64.0,233.0,0.99745,3.08,0.38,8.9 +6.1,0.27,0.44,6.7,0.041,61.0,230.0,0.99505,3.12,0.4,8.9 +6.4,0.43,0.32,1.4,0.048,10.0,67.0,0.992,3.08,0.41,11.4 +6.1,0.36,0.58,15.0,0.044,42.0,115.0,0.9978,3.15,0.51,9.0 +6.2,0.35,0.29,7.3,0.044,56.0,244.0,0.9956,3.36,0.55,10.0 +7.7,0.24,0.29,15.3,0.044,39.0,194.0,0.9982,3.06,0.47,9.6 +6.2,0.34,0.28,7.5,0.034,40.0,197.0,0.99485,3.14,0.6,9.7 +6.3,0.27,0.46,11.75,0.037,61.0,212.0,0.9971,3.25,0.53,9.5 +5.4,0.415,0.19,1.6,0.039,27.0,88.0,0.99265,3.54,0.41,10.0 +6.9,0.48,0.36,3.5,0.03,31.0,135.0,0.9904,3.14,0.38,12.2 +6.5,0.18,0.33,8.0,0.051,16.0,131.0,0.9965,3.28,0.44,8.7 +6.7,0.15,0.29,5.0,0.058,28.0,105.0,0.9946,3.52,0.44,10.2 +8.2,0.345,1.0,18.2,0.047,55.0,205.0,0.99965,2.96,0.43,9.6 +8.5,0.16,0.35,1.6,0.039,24.0,147.0,0.9935,2.96,0.36,10.0 +6.8,0.705,0.25,3.2,0.048,10.0,57.0,0.996,3.36,0.52,9.5 +7.3,0.25,0.39,6.4,0.034,8.0,84.0,0.9942,3.18,0.46,11.5 +7.6,0.345,0.26,1.9,0.043,15.0,134.0,0.9936,3.08,0.38,9.5 +7.6,0.22,0.34,9.7,0.035,26.0,143.0,0.9965,3.08,0.49,9.8 +6.5,0.17,0.33,1.4,0.028,14.0,99.0,0.9928,3.23,0.55,10.1 +8.2,0.23,0.37,1.3,0.042,39.0,117.0,0.9928,2.99,0.36,10.0 +7.6,0.22,0.34,9.7,0.035,26.0,143.0,0.9965,3.08,0.49,9.8 +7.6,0.345,0.26,1.9,0.043,15.0,134.0,0.9936,3.08,0.38,9.5 +7.5,0.32,0.26,1.8,0.042,13.0,133.0,0.9938,3.07,0.38,9.5 +6.6,0.23,0.32,0.9,0.041,25.0,79.0,0.9926,3.39,0.54,10.2 +6.6,0.2,0.32,1.1,0.039,25.0,78.0,0.9926,3.39,0.54,10.2 +7.3,0.24,0.34,15.4,0.05,38.0,174.0,0.9983,3.03,0.42,9.0 +7.3,0.24,0.34,15.4,0.05,38.0,174.0,0.9983,3.03,0.42,9.0 +8.0,0.42,0.36,5.0,0.037,34.0,101.0,0.992,3.13,0.57,12.3 +7.3,0.24,0.34,15.4,0.05,38.0,174.0,0.9983,3.03,0.42,9.0 +6.1,0.19,0.25,4.0,0.023,23.0,112.0,0.9923,3.37,0.51,11.6 +5.9,0.26,0.21,12.5,0.034,36.0,152.0,0.9972,3.28,0.43,9.5 
+8.3,0.23,0.43,3.2,0.035,14.0,101.0,0.9928,3.15,0.36,11.5 +6.5,0.34,0.28,1.8,0.041,43.0,188.0,0.9928,3.13,0.37,9.6 +6.8,0.22,0.35,17.5,0.039,38.0,153.0,0.9994,3.24,0.42,9.0 +6.5,0.08,0.33,1.9,0.028,23.0,93.0,0.991,3.34,0.7,12.0 +5.5,0.42,0.09,1.6,0.019,18.0,68.0,0.9906,3.33,0.51,11.4 +5.1,0.42,0.01,1.5,0.017,25.0,102.0,0.9894,3.38,0.36,12.3 +6.0,0.27,0.19,1.7,0.02,24.0,110.0,0.9898,3.32,0.47,12.6 +6.8,0.22,0.35,17.5,0.039,38.0,153.0,0.9994,3.24,0.42,9.0 +6.5,0.08,0.33,1.9,0.028,23.0,93.0,0.991,3.34,0.7,12.0 +7.1,0.13,0.38,1.8,0.046,14.0,114.0,0.9925,3.32,0.9,11.7 +7.6,0.3,0.25,4.3,0.054,22.0,111.0,0.9956,3.12,0.49,9.2 +6.6,0.13,0.3,4.9,0.058,47.0,131.0,0.9946,3.51,0.45,10.3 +6.5,0.14,0.33,7.6,0.05,53.0,189.0,0.9966,3.25,0.49,8.6 +7.7,0.28,0.33,6.7,0.037,32.0,155.0,0.9951,3.39,0.62,10.7 +6.0,0.2,0.71,1.6,0.15,10.0,54.0,0.9927,3.12,0.47,9.8 +6.0,0.19,0.71,1.5,0.152,9.0,55.0,0.9927,3.12,0.46,9.8 +7.7,0.28,0.33,6.7,0.037,32.0,155.0,0.9951,3.39,0.62,10.7 +5.1,0.39,0.21,1.7,0.027,15.0,72.0,0.9894,3.5,0.45,12.5 +5.7,0.36,0.34,4.2,0.026,21.0,77.0,0.9907,3.41,0.45,11.9 +6.9,0.19,0.33,1.6,0.043,63.0,149.0,0.9925,3.44,0.52,10.8 +6.0,0.41,0.21,1.9,0.05,29.0,122.0,0.9928,3.42,0.52,10.5 +7.4,0.28,0.3,5.3,0.054,44.0,161.0,0.9941,3.12,0.48,10.3 +7.4,0.3,0.3,5.2,0.053,45.0,163.0,0.9941,3.12,0.45,10.3 +6.9,0.19,0.33,1.6,0.043,63.0,149.0,0.9925,3.44,0.52,10.8 +7.7,0.28,0.39,8.9,0.036,8.0,117.0,0.9935,3.06,0.38,12.0 +8.6,0.16,0.38,3.4,0.04,41.0,143.0,0.9932,2.95,0.39,10.2 +8.2,0.26,0.44,1.3,0.046,7.0,69.0,0.9944,3.14,0.62,10.2 +6.5,0.25,0.27,15.2,0.049,75.0,217.0,0.9972,3.19,0.39,9.9 +7.0,0.24,0.18,1.3,0.046,9.0,62.0,0.994,3.38,0.47,10.1 +8.6,0.18,0.36,1.8,0.04,24.0,187.0,0.9956,3.25,0.55,9.5 +7.8,0.27,0.34,1.6,0.046,27.0,154.0,0.9927,3.05,0.45,10.5 +6.0,0.26,0.34,1.3,0.046,6.0,29.0,0.9924,3.29,0.63,10.4 +6.1,0.24,0.27,9.8,0.062,33.0,152.0,0.9966,3.31,0.47,9.5 +8.0,0.24,0.3,17.45,0.056,43.0,184.0,0.9997,3.05,0.5,9.2 +7.6,0.21,0.6,2.1,0.046,47.0,165.0,0.9936,3.05,0.54,10.1 +8.0,0.19,0.36,1.8,0.05,16.0,84.0,0.9936,3.15,0.45,9.8 +6.4,0.28,0.41,6.8,0.045,61.0,216.0,0.9952,3.09,0.46,9.4 +6.4,0.28,0.43,7.1,0.045,60.0,221.0,0.9952,3.09,0.45,9.4 +6.9,0.24,0.39,1.3,0.063,18.0,136.0,0.9928,3.31,0.48,10.4 +5.8,0.36,0.26,3.3,0.038,40.0,153.0,0.9911,3.34,0.55,11.3 +6.6,0.18,0.28,3.3,0.044,18.0,91.0,0.993,3.42,0.64,10.8 +5.8,0.36,0.26,3.3,0.038,40.0,153.0,0.9911,3.34,0.55,11.3 +5.1,0.52,0.06,2.7,0.052,30.0,79.0,0.9932,3.32,0.43,9.3 +6.6,0.22,0.37,1.2,0.059,45.0,199.0,0.993,3.37,0.55,10.3 +8.3,0.15,0.39,1.3,0.055,32.0,146.0,0.993,3.08,0.39,10.5 +7.6,0.16,0.44,1.4,0.043,25.0,109.0,0.9932,3.11,0.75,10.3 +7.7,0.16,0.41,1.7,0.048,60.0,173.0,0.9932,3.24,0.66,11.2 +8.3,0.16,0.48,1.7,0.057,31.0,98.0,0.9943,3.15,0.41,10.3 +6.2,0.25,0.47,11.6,0.048,62.0,210.0,0.9968,3.19,0.5,9.5 +6.1,0.16,0.27,12.6,0.064,63.0,162.0,0.9994,3.66,0.43,8.9 +7.6,0.39,0.22,2.8,0.036,19.0,113.0,0.9926,3.03,0.29,10.2 +6.8,0.37,0.47,11.2,0.071,44.0,136.0,0.9968,2.98,0.88,9.2 +7.6,0.16,0.44,1.4,0.043,25.0,109.0,0.9932,3.11,0.75,10.3 +7.1,0.18,0.42,1.4,0.045,47.0,157.0,0.9916,2.95,0.31,10.5 +8.3,0.14,0.26,1.5,0.049,56.0,189.0,0.9946,3.21,0.62,9.5 +8.6,0.2,0.42,1.5,0.041,35.0,125.0,0.9925,3.11,0.49,11.4 +8.6,0.2,0.42,1.5,0.041,35.0,125.0,0.9925,3.11,0.49,11.4 +6.8,0.19,0.32,7.05,0.019,54.0,188.0,0.9935,3.25,0.37,11.1 +7.6,0.19,0.38,10.6,0.06,48.0,174.0,0.9962,3.13,0.38,10.5 +6.8,0.34,0.74,2.8,0.088,23.0,185.0,0.9928,3.51,0.7,12.0 +6.2,0.15,0.46,1.6,0.039,38.0,123.0,0.993,3.38,0.51,9.7 +6.6,0.14,0.44,1.6,0.042,47.0,140.0,0.993,3.32,0.51,10.2 
+8.0,0.55,0.17,8.2,0.04,13.0,60.0,0.9956,3.09,0.3,9.5 +7.0,0.24,0.35,1.5,0.052,51.0,128.0,0.9941,3.41,0.59,10.4 +6.3,0.6,0.44,11.0,0.05,50.0,245.0,0.9972,3.19,0.57,9.3 +7.1,0.2,0.41,2.1,0.054,24.0,166.0,0.9948,3.48,0.62,10.5 +6.2,0.34,0.29,7.6,0.047,45.0,232.0,0.9955,3.35,0.62,10.0 +7.1,0.3,0.36,6.8,0.055,44.5,234.0,0.9972,3.49,0.64,10.2 +7.1,0.3,0.36,6.8,0.055,44.5,234.0,0.9972,3.49,0.64,10.2 +7.9,0.64,0.46,10.6,0.244,33.0,227.0,0.9983,2.87,0.74,9.1 +8.8,0.17,0.38,1.8,0.04,39.0,148.0,0.9942,3.16,0.67,10.2 +7.5,0.17,0.37,1.5,0.06,18.0,75.0,0.9936,3.54,0.88,10.7 +7.1,0.47,0.24,6.0,0.044,11.0,77.0,0.9956,3.21,0.56,9.7 +7.1,0.15,0.34,5.3,0.034,33.0,104.0,0.9953,3.37,0.52,9.3 +7.5,0.17,0.34,1.4,0.035,13.0,102.0,0.9918,3.05,0.74,11.0 +8.2,0.68,0.3,2.1,0.047,17.0,138.0,0.995,3.22,0.71,10.8 +7.7,0.275,0.3,1.0,0.039,19.0,75.0,0.992,3.01,0.56,10.7 +7.3,0.49,0.32,5.2,0.043,18.0,104.0,0.9952,3.24,0.45,10.7 +7.5,0.33,0.48,19.45,0.048,55.0,243.0,1.001,2.95,0.4,8.8 +7.2,0.21,0.37,1.6,0.049,23.0,94.0,0.9924,3.16,0.48,10.9 +7.3,0.15,0.4,2.0,0.05,24.0,92.0,0.9932,3.14,0.45,10.5 +6.5,0.19,0.1,1.3,0.046,23.0,107.0,0.9937,3.29,0.45,10.0 +7.0,0.31,0.52,1.7,0.029,5.0,61.0,0.9918,3.07,0.43,10.4 +8.3,0.4,0.38,1.1,0.038,15.0,75.0,0.9934,3.03,0.43,9.2 +6.1,0.37,0.36,4.7,0.035,36.0,116.0,0.991,3.31,0.62,12.6 +7.3,0.24,0.34,7.5,0.048,29.0,152.0,0.9962,3.1,0.54,9.0 +6.9,0.21,0.81,1.1,0.137,52.0,123.0,0.9932,3.03,0.39,9.2 +7.6,0.29,0.42,1.3,0.035,18.0,86.0,0.9908,2.99,0.39,11.3 +9.4,0.29,0.55,2.2,0.05,17.0,119.0,0.9962,3.12,0.69,10.3 +7.0,0.31,0.52,1.7,0.029,5.0,61.0,0.9918,3.07,0.43,10.4 +8.6,0.26,0.41,2.2,0.049,29.0,111.0,0.9941,2.96,0.44,10.0 +7.5,0.21,0.34,1.2,0.06,26.0,111.0,0.9931,3.51,0.47,10.7 +7.2,0.51,0.24,10.0,0.093,35.0,197.0,0.9981,3.41,0.47,9.0 +7.5,0.21,0.34,1.2,0.06,26.0,111.0,0.9931,3.51,0.47,10.7 +5.3,0.3,0.2,1.1,0.077,48.0,166.0,0.9944,3.3,0.54,8.7 +8.0,0.26,0.36,2.0,0.054,30.0,121.0,0.992,3.09,0.72,11.6 +7.0,0.21,0.28,7.5,0.07,45.0,185.0,0.9966,3.34,0.55,9.4 +6.7,0.26,0.26,4.0,0.079,35.5,216.0,0.9956,3.31,0.68,9.5 +6.7,0.26,0.26,4.1,0.073,36.0,202.0,0.9956,3.3,0.67,9.5 +8.1,0.26,0.37,1.9,0.072,48.0,159.0,0.9949,3.37,0.7,10.9 +8.3,0.22,0.38,14.8,0.054,32.0,126.0,1.0002,3.22,0.5,9.7 +6.4,0.3,0.51,5.5,0.048,62.0,172.0,0.9942,3.08,0.45,9.1 +7.5,0.19,0.34,2.6,0.037,33.0,125.0,0.9923,3.1,0.49,11.1 +8.8,0.33,0.44,6.35,0.024,9.0,87.0,0.9917,2.96,0.4,12.6 +6.9,0.2,0.36,1.5,0.031,38.0,147.0,0.9931,3.35,0.56,11.0 +8.0,0.37,0.32,1.6,0.04,32.0,166.0,0.992,3.0,0.55,11.3 +8.3,0.22,0.38,14.8,0.054,32.0,126.0,1.0002,3.22,0.5,9.7 +8.2,0.29,0.33,9.1,0.036,28.0,118.0,0.9953,2.96,0.4,10.9 +7.7,0.34,0.3,8.0,0.048,25.0,192.0,0.9951,2.97,0.47,10.9 +6.2,0.55,0.45,12.0,0.049,27.0,186.0,0.9974,3.17,0.5,9.3 +6.4,0.4,0.19,3.2,0.033,28.0,124.0,0.9904,3.22,0.54,12.7 +7.5,0.28,0.33,7.7,0.048,42.0,180.0,0.9974,3.37,0.59,10.1 +7.8,0.26,0.44,1.3,0.037,43.0,132.0,0.9944,3.18,0.65,10.0 +6.5,0.26,0.34,16.3,0.051,56.0,197.0,1.0004,3.49,0.42,9.8 +6.3,0.34,0.29,6.2,0.046,29.0,227.0,0.9952,3.29,0.53,10.1 +6.8,0.15,0.33,4.7,0.059,31.0,118.0,0.9956,3.43,0.39,9.0 +6.3,0.27,0.25,5.8,0.038,52.0,155.0,0.995,3.28,0.38,9.4 +6.3,0.27,0.25,5.8,0.038,52.0,155.0,0.995,3.28,0.38,9.4 +7.4,0.2,0.37,16.95,0.048,43.0,190.0,0.9995,3.03,0.42,9.2 +6.3,0.23,0.21,5.1,0.035,29.0,142.0,0.9942,3.36,0.33,10.1 +7.3,0.31,0.69,10.2,0.041,58.0,160.0,0.9977,3.06,0.45,8.6 +5.2,0.24,0.45,3.8,0.027,21.0,128.0,0.992,3.55,0.49,11.2 +7.0,0.24,0.32,1.3,0.037,39.0,123.0,0.992,3.17,0.42,11.2 +7.4,0.2,0.37,16.95,0.048,43.0,190.0,0.9995,3.03,0.42,9.2 
+7.0,0.17,0.33,4.0,0.034,17.0,127.0,0.9934,3.19,0.39,10.6 +8.3,0.21,0.58,17.1,0.049,62.0,213.0,1.0006,3.01,0.51,9.3 +7.2,0.21,0.35,14.5,0.048,35.0,178.0,0.9982,3.05,0.47,8.9 +7.1,0.21,0.4,1.2,0.069,24.0,156.0,0.9928,3.42,0.43,10.6 +8.4,0.17,0.31,6.7,0.038,29.0,132.0,0.9945,3.1,0.32,10.6 +7.4,0.24,0.31,8.4,0.045,52.0,183.0,0.9963,3.09,0.32,8.8 +5.3,0.24,0.33,1.3,0.033,25.0,97.0,0.9906,3.59,0.38,11.0 +6.5,0.28,0.26,8.8,0.04,44.0,139.0,0.9956,3.32,0.37,10.2 +6.3,0.23,0.21,5.1,0.035,29.0,142.0,0.9942,3.36,0.33,10.1 +6.5,0.29,0.25,10.6,0.039,32.0,120.0,0.9962,3.31,0.34,10.1 +5.8,0.29,0.21,2.6,0.025,12.0,120.0,0.9894,3.39,0.79,14.0 +6.3,0.27,0.25,5.8,0.038,52.0,155.0,0.995,3.28,0.38,9.4 +6.3,0.17,0.42,2.8,0.028,45.0,107.0,0.9908,3.27,0.43,11.8 +6.3,0.16,0.4,1.6,0.033,59.0,148.0,0.9914,3.44,0.53,11.4 +7.9,0.29,0.39,6.7,0.036,6.0,117.0,0.9938,3.12,0.42,10.7 +7.3,0.31,0.69,10.2,0.041,58.0,160.0,0.9977,3.06,0.45,8.6 +5.5,0.32,0.45,4.9,0.028,25.0,191.0,0.9922,3.51,0.49,11.5 +5.2,0.24,0.45,3.8,0.027,21.0,128.0,0.992,3.55,0.49,11.2 +7.2,0.37,0.15,2.0,0.029,27.0,87.0,0.9903,3.3,0.59,12.6 +6.1,0.29,0.27,1.7,0.024,13.0,76.0,0.9893,3.21,0.51,12.6 +9.2,0.22,0.4,2.4,0.054,18.0,151.0,0.9952,3.04,0.46,9.3 +7.2,0.37,0.15,2.0,0.029,27.0,87.0,0.9903,3.3,0.59,12.6 +8.0,0.18,0.37,1.3,0.04,15.0,96.0,0.9912,3.06,0.61,12.1 +6.5,0.22,0.34,12.0,0.053,55.0,177.0,0.9983,3.52,0.44,9.9 +7.4,0.18,0.4,1.6,0.047,22.0,102.0,0.9937,3.28,0.44,10.7 +6.5,0.52,0.17,1.4,0.047,5.0,26.0,0.9932,3.26,0.32,10.0 +7.0,0.15,0.38,2.2,0.047,33.0,96.0,0.9928,3.13,0.39,10.4 +5.9,0.415,0.13,1.4,0.04,11.0,64.0,0.9922,3.29,0.52,10.5 +8.1,0.45,0.34,8.3,0.037,33.0,216.0,0.9976,3.31,0.64,9.7 +5.8,0.415,0.13,1.4,0.04,11.0,64.0,0.9922,3.29,0.52,10.5 +6.4,0.5,0.16,12.9,0.042,26.0,138.0,0.9974,3.28,0.33,9.0 +6.7,0.105,0.32,12.4,0.051,34.0,106.0,0.998,3.54,0.45,9.2 +6.0,0.4,0.3,1.6,0.047,30.0,117.0,0.9931,3.17,0.48,10.1 +6.6,0.25,0.39,1.45,0.04,40.0,89.0,0.9911,3.35,0.4,11.4 +9.8,0.36,0.45,1.6,0.042,11.0,124.0,0.9944,2.93,0.46,10.8 +9.6,0.23,0.4,1.5,0.044,19.0,135.0,0.9937,2.96,0.49,10.9 +6.3,0.55,0.45,13.0,0.047,33.0,182.0,0.9974,3.2,0.46,9.2 +6.5,0.115,0.29,1.95,0.038,73.0,166.0,0.989,3.12,0.25,12.9 +6.4,0.125,0.29,5.85,0.042,24.0,99.0,0.992,3.23,0.32,12.0 +5.7,0.1,0.27,1.3,0.047,21.0,100.0,0.9928,3.27,0.46,9.5 +7.9,0.25,0.29,5.3,0.031,33.0,117.0,0.9918,3.06,0.32,11.8 +6.9,0.2,0.28,1.2,0.048,36.0,159.0,0.9936,3.19,0.43,9.1 +6.9,0.23,0.34,4.0,0.047,24.0,128.0,0.9944,3.2,0.52,9.7 +6.8,0.39,0.31,14.35,0.043,28.0,162.0,0.9988,3.17,0.54,9.1 +8.7,0.22,0.42,2.3,0.053,27.0,114.0,0.994,2.99,0.43,10.0 +7.4,0.41,0.34,4.7,0.042,19.0,127.0,0.9953,3.25,0.42,10.4 +6.7,0.25,0.34,12.85,0.048,30.0,161.0,0.9986,3.44,0.47,9.5 +6.0,0.26,0.42,5.2,0.027,70.0,178.0,0.9914,3.4,0.4,12.3 +6.1,0.31,0.37,8.4,0.031,70.0,170.0,0.9934,3.42,0.4,11.7 +9.2,0.28,0.46,3.2,0.058,39.0,133.0,0.996,3.14,0.58,9.5 +9.0,0.31,0.49,6.9,0.034,26.0,91.0,0.9937,2.99,0.34,11.5 +8.5,0.16,0.33,1.0,0.076,17.0,57.0,0.9921,3.14,0.46,10.6 +9.3,0.34,0.49,7.3,0.052,30.0,146.0,0.998,3.17,0.61,10.2 +9.2,0.28,0.46,3.2,0.058,39.0,133.0,0.996,3.14,0.58,9.5 +7.2,0.24,0.3,1.6,0.048,27.0,131.0,0.9933,3.25,0.45,10.5 +7.2,0.25,0.32,1.5,0.047,27.0,132.0,0.9933,3.26,0.44,10.5 +6.8,0.32,0.18,7.5,0.041,71.0,223.0,0.9959,3.14,0.41,8.9 +9.1,0.27,0.32,1.1,0.031,15.0,151.0,0.9936,3.03,0.41,10.6 +8.9,0.34,0.32,1.3,0.041,12.0,188.0,0.9953,3.17,0.49,9.5 +7.0,0.17,0.37,5.7,0.025,29.0,111.0,0.9938,3.2,0.49,10.8 +6.7,0.25,0.23,7.2,0.038,61.0,220.0,0.9952,3.14,0.35,9.5 +6.9,0.32,0.17,7.6,0.042,69.0,219.0,0.9959,3.13,0.4,8.9 
+6.8,0.32,0.18,7.5,0.041,71.0,223.0,0.9959,3.14,0.41,8.9 +6.1,0.6,0.0,1.3,0.042,24.0,79.0,0.9937,3.31,0.38,9.4 +5.3,0.395,0.07,1.3,0.035,26.0,102.0,0.992,3.5,0.35,10.6 +7.9,0.16,0.3,4.8,0.037,37.0,171.0,0.9967,3.47,0.44,9.0 +7.6,0.33,0.36,2.1,0.034,26.0,172.0,0.9944,3.42,0.48,10.5 +7.8,0.3,0.29,16.85,0.054,23.0,135.0,0.9998,3.16,0.38,9.0 +7.8,0.3,0.29,16.85,0.054,23.0,135.0,0.9998,3.16,0.38,9.0 +5.7,0.26,0.27,4.1,0.201,73.5,189.5,0.9942,3.27,0.38,9.4 +7.8,0.3,0.29,16.85,0.054,23.0,135.0,0.9998,3.16,0.38,9.0 +7.5,0.14,0.34,1.3,0.055,50.0,153.0,0.9945,3.29,0.8,9.6 +7.8,0.3,0.29,16.85,0.054,23.0,135.0,0.9998,3.16,0.38,9.0 +6.6,0.25,0.41,7.4,0.043,29.0,151.0,0.9946,3.15,0.6,10.2 +5.7,0.26,0.27,4.1,0.201,73.5,189.5,0.9942,3.27,0.38,9.4 +8.2,0.23,0.49,0.9,0.057,15.0,73.0,0.9928,3.07,0.38,10.4 +6.0,0.24,0.32,6.3,0.03,34.0,129.0,0.9946,3.52,0.41,10.4 +6.1,0.45,0.27,0.8,0.039,13.0,82.0,0.9927,3.23,0.32,9.5 +7.4,0.23,0.43,1.4,0.044,22.0,113.0,0.9938,3.22,0.62,10.6 +7.2,0.2,0.38,1.0,0.037,21.0,74.0,0.9918,3.21,0.37,11.0 +7.5,0.14,0.34,1.3,0.055,50.0,153.0,0.9945,3.29,0.8,9.6 +7.7,0.25,0.43,4.5,0.062,20.0,115.0,0.9966,3.38,0.5,9.9 +8.2,0.61,0.45,5.4,0.03,15.0,118.0,0.9954,3.14,0.34,9.6 +7.6,0.21,0.44,1.9,0.036,10.0,119.0,0.9913,3.01,0.7,12.8 +7.4,0.22,0.33,2.0,0.045,31.0,101.0,0.9931,3.42,0.55,11.4 +7.2,0.26,0.26,12.7,0.036,49.0,214.0,0.9986,3.41,0.5,10.0 +6.4,0.25,0.41,8.6,0.042,57.0,173.0,0.9965,3.0,0.44,9.1 +6.3,0.32,0.35,11.1,0.039,29.0,198.0,0.9984,3.36,0.5,9.4 +6.8,0.25,0.29,2.0,0.042,19.0,189.0,0.9952,3.46,0.54,10.2 +9.8,0.44,0.4,2.8,0.036,35.0,167.0,0.9956,2.97,0.39,9.2 +7.2,0.2,0.25,4.5,0.044,31.0,109.0,0.9949,3.23,0.36,9.4 +8.2,0.61,0.45,5.4,0.03,15.0,118.0,0.9954,3.14,0.34,9.6 +7.5,0.42,0.45,9.1,0.029,20.0,125.0,0.996,3.12,0.36,10.1 +7.4,0.22,0.33,2.0,0.045,31.0,101.0,0.9931,3.42,0.55,11.4 +6.4,0.26,0.3,2.2,0.025,33.0,134.0,0.992,3.21,0.47,10.6 +7.9,0.46,0.32,4.1,0.033,40.0,138.0,0.9912,3.18,0.44,12.8 +6.5,0.41,0.64,11.8,0.065,65.0,225.0,0.9978,3.12,0.51,8.9 +7.5,0.32,0.37,1.2,0.048,22.0,184.0,0.9938,3.09,0.43,9.3 +6.6,0.21,0.38,2.2,0.026,40.0,104.0,0.9914,3.25,0.4,11.1 +7.1,0.21,0.3,1.4,0.037,45.0,143.0,0.9932,3.13,0.33,9.9 +7.6,0.26,0.47,1.6,0.068,5.0,55.0,0.9944,3.1,0.45,9.6 +7.6,0.21,0.44,1.9,0.036,10.0,119.0,0.9913,3.01,0.7,12.8 +6.9,0.25,0.26,5.2,0.024,36.0,135.0,0.9948,3.16,0.72,10.7 +7.1,0.26,0.32,14.45,0.074,29.0,107.0,0.998,2.96,0.42,9.2 +7.3,0.22,0.4,14.75,0.042,44.5,129.5,0.9998,3.36,0.41,9.1 +6.2,0.37,0.22,8.3,0.025,36.0,216.0,0.9964,3.33,0.6,9.6 +7.9,0.22,0.45,14.2,0.038,53.0,141.0,0.9992,3.03,0.46,9.2 +6.9,0.25,0.26,5.2,0.024,36.0,135.0,0.9948,3.16,0.72,10.7 +7.3,0.22,0.4,14.75,0.042,44.5,129.5,0.9998,3.36,0.41,9.1 +7.1,0.26,0.32,14.45,0.074,29.0,107.0,0.998,2.96,0.42,9.2 +7.4,0.25,0.37,6.9,0.02,14.0,93.0,0.9939,3.0,0.48,10.7 +6.8,0.18,0.37,1.5,0.027,37.0,93.0,0.992,3.3,0.45,10.8 +7.0,0.17,0.37,1.5,0.028,26.0,75.0,0.9922,3.3,0.46,10.8 +6.4,0.3,0.38,7.8,0.046,35.0,192.0,0.9955,3.1,0.37,9.0 +5.0,0.33,0.16,1.5,0.049,10.0,97.0,0.9917,3.48,0.44,10.7 +5.0,0.33,0.16,1.5,0.049,10.0,97.0,0.9917,3.48,0.44,10.7 +8.9,0.33,0.32,1.5,0.047,11.0,200.0,0.9954,3.19,0.46,9.4 +7.0,0.26,0.46,15.55,0.037,61.0,171.0,0.9986,2.94,0.35,8.8 +6.4,0.3,0.38,7.8,0.046,35.0,192.0,0.9955,3.1,0.37,9.0 +6.3,0.21,0.4,1.7,0.031,48.0,134.0,0.9917,3.42,0.49,11.5 +8.0,0.23,0.46,1.5,0.03,30.0,125.0,0.9907,3.23,0.47,12.5 +9.2,0.28,0.41,1.0,0.042,14.0,59.0,0.9922,2.96,0.25,10.5 +7.3,0.27,0.39,6.7,0.064,28.0,188.0,0.9958,3.29,0.3,9.7 +7.6,0.32,0.36,1.6,0.04,32.0,155.0,0.993,3.23,0.52,11.3 
+5.0,0.33,0.16,1.5,0.049,10.0,97.0,0.9917,3.48,0.44,10.7 +9.7,0.24,0.45,1.2,0.033,11.0,59.0,0.9926,2.74,0.47,10.8 +8.0,0.28,0.42,7.1,0.045,41.0,169.0,0.9959,3.17,0.43,10.6 +8.2,0.37,0.36,1.0,0.034,17.0,93.0,0.9906,3.04,0.32,11.7 +8.0,0.61,0.38,12.1,0.301,24.0,220.0,0.9993,2.94,0.48,9.2 +7.2,0.26,0.44,7.1,0.027,25.0,126.0,0.993,3.02,0.34,11.1 +8.2,0.37,0.36,1.0,0.034,17.0,93.0,0.9906,3.04,0.32,11.7 +6.4,0.23,0.33,1.15,0.044,15.5,217.5,0.992,3.33,0.44,11.0 +5.9,0.4,0.32,6.0,0.034,50.0,127.0,0.992,3.51,0.58,12.5 +7.6,0.28,0.39,1.2,0.038,21.0,115.0,0.994,3.16,0.67,10.0 +8.0,0.28,0.42,7.1,0.045,41.0,169.0,0.9959,3.17,0.43,10.6 +7.2,0.23,0.39,2.3,0.033,29.0,102.0,0.9908,3.26,0.54,12.3 +6.8,0.32,0.37,3.4,0.023,19.0,87.0,0.9902,3.14,0.53,12.7 +7.2,0.23,0.39,2.3,0.033,29.0,102.0,0.9908,3.26,0.54,12.3 +6.9,0.18,0.38,6.5,0.039,20.0,110.0,0.9943,3.1,0.42,10.5 +9.4,0.26,0.53,1.2,0.047,25.0,109.0,0.9921,3.23,0.28,12.5 +8.3,0.33,0.42,1.15,0.033,18.0,96.0,0.9911,3.2,0.32,12.4 +7.3,0.29,0.3,13.0,0.043,46.0,238.0,0.9986,3.06,0.41,8.7 +7.9,0.41,0.37,4.5,0.03,40.0,114.0,0.992,3.17,0.54,12.4 +7.9,0.44,0.37,5.85,0.033,27.0,93.0,0.992,3.16,0.54,12.6 +7.7,0.39,0.3,5.2,0.037,29.0,131.0,0.9943,3.38,0.44,11.0 +7.7,0.26,0.31,1.3,0.043,47.0,155.0,0.9937,3.42,0.5,10.1 +7.8,0.32,0.31,1.7,0.036,46.0,195.0,0.993,3.03,0.48,10.5 +6.8,0.32,0.37,3.4,0.023,19.0,87.0,0.9902,3.14,0.53,12.7 +7.3,0.24,0.39,3.6,0.024,35.0,116.0,0.9928,3.17,0.51,10.9 +7.1,0.44,0.37,2.7,0.041,35.0,128.0,0.9896,3.07,0.43,13.5 +10.3,0.25,0.48,2.2,0.042,28.0,164.0,0.998,3.19,0.59,9.7 +7.9,0.14,0.28,1.8,0.041,44.0,178.0,0.9954,3.45,0.43,9.2 +7.4,0.18,0.42,2.1,0.036,33.0,187.0,0.9938,3.4,0.41,10.6 +8.1,0.43,0.42,6.6,0.033,36.0,141.0,0.9918,2.98,0.39,13.3 +7.1,0.44,0.37,2.7,0.041,35.0,128.0,0.9896,3.07,0.43,13.5 +6.4,0.26,0.22,5.1,0.037,23.0,131.0,0.9944,3.29,0.32,10.1 +8.0,0.66,0.72,17.55,0.042,62.0,233.0,0.9999,2.92,0.68,9.4 +8.0,0.2,0.4,5.2,0.055,41.0,167.0,0.9953,3.18,0.4,10.6 +7.2,0.21,0.34,1.1,0.046,25.0,80.0,0.992,3.25,0.4,11.3 +7.2,0.18,0.31,1.1,0.045,20.0,73.0,0.9925,3.32,0.4,10.8 +8.4,0.57,0.44,10.7,0.051,46.0,195.0,0.9981,3.15,0.51,10.4 +5.3,0.26,0.23,5.15,0.034,48.0,160.0,0.9952,3.82,0.51,10.5 +5.7,0.245,0.33,1.1,0.049,28.0,150.0,0.9927,3.13,0.42,9.3 +5.6,0.245,0.32,1.1,0.047,24.0,152.0,0.9927,3.12,0.42,9.3 +7.3,0.25,0.41,1.8,0.037,52.0,165.0,0.9911,3.29,0.39,12.2 +7.0,0.16,0.73,1.0,0.138,58.0,150.0,0.9936,3.08,0.3,9.2 +6.4,0.22,0.34,1.8,0.057,29.0,104.0,0.9959,3.81,0.57,10.3 +7.3,0.18,0.65,1.4,0.046,28.0,157.0,0.9946,3.33,0.62,9.4 +6.4,0.17,0.27,6.7,0.036,88.0,223.0,0.9948,3.28,0.35,10.2 +6.9,0.29,0.16,6.8,0.034,65.0,212.0,0.9955,3.08,0.39,9.0 +6.2,0.21,0.38,6.8,0.036,64.0,245.0,0.9951,3.06,0.36,9.3 +6.4,0.23,0.3,7.1,0.037,63.0,236.0,0.9952,3.06,0.34,9.2 +7.3,0.19,0.68,1.5,0.05,31.0,156.0,0.9946,3.32,0.64,9.4 +7.3,0.18,0.65,1.4,0.046,28.0,157.0,0.9946,3.33,0.62,9.4 +9.6,0.29,0.46,1.45,0.039,77.5,223.0,0.9944,2.92,0.46,9.5 +7.2,0.14,0.35,1.2,0.036,15.0,73.0,0.9938,3.46,0.39,9.9 +6.9,0.31,0.34,7.4,0.059,36.0,174.0,0.9963,3.46,0.62,11.1 +7.5,0.28,0.34,4.2,0.028,36.0,116.0,0.991,2.99,0.41,12.3 +8.0,0.22,0.42,14.6,0.044,45.0,163.0,1.0003,3.21,0.69,8.6 +7.6,0.31,0.29,10.5,0.04,21.0,145.0,0.9966,3.04,0.35,9.4 +8.4,0.35,0.56,13.8,0.048,55.0,190.0,0.9993,3.07,0.58,9.4 +8.0,0.22,0.42,14.6,0.044,45.0,163.0,1.0003,3.21,0.69,8.6 +8.1,0.5,0.47,1.1,0.037,23.0,126.0,0.9938,3.21,0.42,10.9 +7.0,0.39,0.31,5.3,0.169,32.0,162.0,0.9965,3.2,0.48,9.4 +8.1,0.5,0.47,1.1,0.037,23.0,126.0,0.9938,3.21,0.42,10.9 +8.4,0.35,0.56,13.8,0.048,55.0,190.0,0.9993,3.07,0.58,9.4 
+6.2,0.22,0.27,1.5,0.064,20.0,132.0,0.9938,3.22,0.46,9.2 +8.0,0.22,0.42,14.6,0.044,45.0,163.0,1.0003,3.21,0.69,8.6 +7.6,0.31,0.29,10.5,0.04,21.0,145.0,0.9966,3.04,0.35,9.4 +7.0,0.24,0.36,4.9,0.083,10.0,133.0,0.9942,3.33,0.37,10.8 +6.6,0.27,0.3,1.9,0.025,14.0,153.0,0.9928,3.29,0.62,10.5 +7.8,0.16,0.41,1.7,0.026,29.0,140.0,0.991,3.02,0.78,12.5 +7.7,0.27,0.34,1.8,0.028,26.0,168.0,0.9911,2.99,0.48,12.1 +7.4,0.31,0.74,10.7,0.039,51.0,147.0,0.9977,3.02,0.43,8.7 +8.0,0.45,0.36,8.8,0.026,50.0,151.0,0.9927,3.07,0.25,12.7 +7.7,0.27,0.34,1.8,0.028,26.0,168.0,0.9911,2.99,0.48,12.1 +7.8,0.16,0.41,1.7,0.026,29.0,140.0,0.991,3.02,0.78,12.5 +6.6,0.16,0.29,1.8,0.05,40.0,147.0,0.9912,3.06,0.44,11.4 +8.3,0.21,0.4,1.6,0.032,35.0,110.0,0.9907,3.02,0.6,12.9 +7.2,0.32,0.33,1.4,0.029,29.0,109.0,0.9902,3.15,0.51,12.8 +6.6,0.16,0.3,1.6,0.034,15.0,78.0,0.992,3.38,0.44,11.2 +8.4,0.16,0.33,1.5,0.033,16.0,98.0,0.994,3.14,0.42,9.7 +7.5,0.23,0.32,9.2,0.038,54.0,191.0,0.9966,3.04,0.56,9.7 +6.2,0.17,0.3,1.1,0.037,14.0,79.0,0.993,3.5,0.54,10.3 +6.9,0.39,0.22,4.3,0.03,10.0,102.0,0.993,3.0,0.87,11.6 +6.9,0.41,0.22,4.2,0.031,10.0,102.0,0.993,3.0,0.86,11.6 +7.5,0.23,0.32,9.2,0.038,54.0,191.0,0.9966,3.04,0.56,9.7 +7.5,0.38,0.33,5.0,0.045,30.0,131.0,0.9942,3.32,0.44,10.9 +7.3,0.42,0.38,6.8,0.045,29.0,122.0,0.9925,3.19,0.37,12.6 +7.3,0.34,0.39,5.2,0.04,45.0,163.0,0.9925,3.3,0.47,12.4 +7.8,0.23,0.28,4.75,0.042,45.0,166.0,0.9928,2.96,0.4,11.5 +9.0,0.245,0.38,5.9,0.045,52.0,159.0,0.995,2.93,0.35,10.2 +6.9,0.2,0.4,7.7,0.032,51.0,176.0,0.9939,3.22,0.27,11.4 +7.4,0.19,0.42,6.4,0.067,39.0,212.0,0.9958,3.3,0.33,9.6 +8.2,0.2,0.36,8.1,0.035,60.0,163.0,0.9952,3.05,0.3,10.3 +8.0,0.59,0.71,17.35,0.038,61.0,228.0,1.0,2.95,0.75,9.3 +7.9,0.14,0.45,1.8,0.05,17.0,114.0,0.9948,3.33,0.49,10.7 +6.8,0.24,0.4,1.8,0.047,34.0,105.0,0.99,3.13,0.49,12.8 +9.7,0.14,0.59,1.5,0.049,23.0,142.0,0.9958,2.98,0.62,9.5 +9.2,0.15,0.68,1.6,0.046,22.0,130.0,0.9948,3.02,0.45,10.4 +9.4,0.17,0.55,1.6,0.049,14.0,94.0,0.9949,3.02,0.61,10.3 +5.2,0.365,0.08,13.5,0.041,37.0,142.0,0.997,3.46,0.39,9.9 +6.3,0.23,0.22,3.75,0.039,37.0,116.0,0.9927,3.23,0.5,10.7 +9.6,0.25,0.54,1.3,0.04,16.0,160.0,0.9938,2.94,0.43,10.5 +9.2,0.32,0.42,1.3,0.046,14.0,186.0,0.9949,3.08,0.48,9.6 +6.4,0.31,0.4,6.2,0.04,46.0,169.0,0.9953,3.15,0.46,9.3 +8.1,0.2,0.36,9.7,0.044,63.0,162.0,0.997,3.1,0.46,10.0 +7.9,0.255,0.26,2.0,0.026,40.0,190.0,0.9932,3.04,0.39,11.2 +7.0,0.15,0.34,1.4,0.039,21.0,177.0,0.9927,3.32,0.62,10.8 +6.4,0.15,0.31,1.1,0.044,25.0,96.0,0.9932,3.54,0.51,10.3 +6.4,0.25,0.53,6.6,0.038,59.0,234.0,0.9955,3.03,0.42,8.8 +7.6,0.19,0.42,1.5,0.044,6.0,114.0,0.9914,3.04,0.74,12.8 +7.3,0.43,0.37,4.6,0.028,17.0,114.0,0.991,3.23,0.43,13.2 +5.1,0.31,0.3,0.9,0.037,28.0,152.0,0.992,3.54,0.56,10.1 +6.2,0.2,0.26,1.7,0.093,40.0,161.0,0.9924,3.44,0.66,11.0 +6.9,0.16,0.35,1.3,0.043,21.0,182.0,0.9927,3.25,0.62,10.8 +7.7,0.32,0.48,2.3,0.04,28.0,114.0,0.9911,3.2,0.52,12.8 +6.5,0.22,0.72,6.8,0.042,33.0,168.0,0.9958,3.12,0.36,9.2 +6.8,0.26,0.33,1.5,0.047,44.0,167.0,0.9928,3.12,0.44,10.5 +5.2,0.37,0.33,1.2,0.028,13.0,81.0,0.9902,3.37,0.38,11.7 +8.4,0.19,0.43,2.1,0.052,20.0,104.0,0.994,2.85,0.46,9.5 +8.3,0.21,0.41,2.2,0.05,24.0,108.0,0.994,2.85,0.45,9.5 +6.8,0.15,0.32,8.8,0.058,24.0,110.0,0.9972,3.4,0.4,8.8 +7.9,0.16,0.64,17.0,0.05,69.0,210.0,1.0004,3.15,0.51,9.3 +7.8,0.21,0.39,1.8,0.034,62.0,180.0,0.991,3.09,0.75,12.6 +9.0,0.24,0.5,1.2,0.048,26.0,107.0,0.9918,3.21,0.34,12.4 +5.7,0.21,0.24,2.3,0.047,60.0,189.0,0.995,3.65,0.72,10.1 +7.8,0.29,0.36,7.0,0.042,38.0,161.0,0.9941,3.26,0.37,11.2 
+6.7,0.18,0.3,6.4,0.048,40.0,251.0,0.9956,3.29,0.52,10.0 +6.7,0.18,0.3,6.4,0.048,40.0,251.0,0.9956,3.29,0.52,10.0 +8.4,0.58,0.27,12.15,0.033,37.0,116.0,0.9959,2.99,0.39,10.8 +7.2,0.16,0.32,0.8,0.04,50.0,121.0,0.9922,3.27,0.33,10.0 +7.6,0.54,0.23,2.0,0.029,13.0,151.0,0.9931,3.04,0.33,10.4 +8.4,0.58,0.27,12.15,0.033,37.0,116.0,0.9959,2.99,0.39,10.8 +6.6,0.25,0.31,12.4,0.059,52.0,181.0,0.9984,3.51,0.47,9.8 +7.3,0.23,0.37,1.9,0.041,51.0,165.0,0.9908,3.26,0.4,12.2 +7.3,0.39,0.37,1.1,0.043,36.0,113.0,0.991,3.39,0.48,12.7 +7.0,0.46,0.39,6.2,0.039,46.0,163.0,0.9928,3.21,0.35,12.2 +8.2,0.35,0.4,6.3,0.039,35.0,162.0,0.9936,3.15,0.34,11.9 +7.8,0.29,0.36,7.0,0.042,38.0,161.0,0.9941,3.26,0.37,11.2 +9.2,0.35,0.39,0.9,0.042,15.0,61.0,0.9924,2.96,0.28,10.4 +8.0,0.57,0.39,3.9,0.034,22.0,122.0,0.9917,3.29,0.67,12.8 +6.5,0.37,0.33,3.9,0.027,40.0,130.0,0.9906,3.28,0.39,12.7 +5.7,0.21,0.24,2.3,0.047,60.0,189.0,0.995,3.65,0.72,10.1 +6.7,0.18,0.3,6.4,0.048,40.0,251.0,0.9956,3.29,0.52,10.0 +7.8,0.13,0.3,1.8,0.04,43.0,179.0,0.9955,3.43,0.41,9.0 +7.6,0.19,0.41,1.1,0.04,38.0,143.0,0.9907,2.92,0.42,11.4 +7.3,0.22,0.41,15.4,0.05,55.0,191.0,1.0,3.32,0.59,8.9 +6.3,0.29,0.4,6.5,0.039,43.0,167.0,0.9953,3.15,0.44,9.3 +6.8,0.35,0.32,2.4,0.048,35.0,103.0,0.9911,3.28,0.46,12.0 +6.5,0.19,0.32,1.4,0.04,31.0,132.0,0.9922,3.36,0.54,10.8 +6.2,0.12,0.26,5.7,0.044,56.0,158.0,0.9951,3.52,0.37,10.5 +6.0,0.13,0.28,5.7,0.038,56.0,189.5,0.9948,3.59,0.43,10.6 +6.4,0.25,0.33,1.4,0.04,42.0,115.0,0.9906,3.19,0.48,11.3 +6.9,0.32,0.16,1.4,0.051,15.0,96.0,0.994,3.22,0.38,9.5 +7.6,0.19,0.41,1.1,0.04,38.0,143.0,0.9907,2.92,0.42,11.4 +6.7,0.13,0.28,1.2,0.046,35.0,140.0,0.9927,3.33,0.33,10.1 +7.0,0.14,0.41,0.9,0.037,22.0,95.0,0.9914,3.25,0.43,10.9 +7.6,0.27,0.24,3.8,0.058,19.0,115.0,0.9958,3.15,0.45,8.9 +7.3,0.22,0.41,15.4,0.05,55.0,191.0,1.0,3.32,0.59,8.9 +7.4,0.64,0.47,14.15,0.168,42.0,185.0,0.9984,2.9,0.49,9.3 +7.6,0.28,0.39,1.9,0.052,23.0,116.0,0.9941,3.25,0.4,10.4 +8.3,0.26,0.41,9.2,0.042,41.0,162.0,0.9944,3.1,0.38,12.0 +10.7,0.22,0.56,8.2,0.044,37.0,181.0,0.998,2.87,0.68,9.5 +10.7,0.22,0.56,8.2,0.044,37.0,181.0,0.998,2.87,0.68,9.5 +6.9,0.23,0.34,2.7,0.032,24.0,121.0,0.9902,3.14,0.38,12.4 +6.2,0.3,0.32,1.7,0.032,30.0,130.0,0.9911,3.28,0.41,11.2 +6.9,0.27,0.41,1.7,0.047,6.0,134.0,0.9929,3.15,0.69,11.4 +6.9,0.28,0.41,1.7,0.05,10.0,136.0,0.993,3.16,0.71,11.4 +6.9,0.28,0.3,1.6,0.047,46.0,132.0,0.9918,3.35,0.38,11.1 +6.9,0.46,0.2,0.9,0.054,5.0,126.0,0.992,3.1,0.42,10.4 +6.9,0.38,0.32,8.5,0.044,36.0,152.0,0.9932,3.38,0.35,12.0 +5.7,0.43,0.3,5.7,0.039,24.0,98.0,0.992,3.54,0.61,12.3 +6.6,0.56,0.16,3.1,0.045,28.0,92.0,0.994,3.12,0.35,9.1 +7.1,0.36,0.56,1.3,0.046,25.0,102.0,0.9923,3.24,0.33,10.5 +6.8,0.23,0.4,1.6,0.047,5.0,133.0,0.993,3.23,0.7,11.4 +6.2,0.33,0.29,1.3,0.042,26.0,138.0,0.9956,3.77,0.64,9.5 +5.6,0.49,0.13,4.5,0.039,17.0,116.0,0.9907,3.42,0.9,13.7 +6.6,0.42,0.33,2.8,0.034,15.0,85.0,0.99,3.28,0.51,13.4 +7.3,0.18,0.29,1.2,0.044,12.0,143.0,0.9918,3.2,0.48,11.3 +8.1,0.19,0.4,0.9,0.037,73.0,180.0,0.9926,3.06,0.34,10.0 +5.9,0.19,0.26,7.4,0.034,33.0,123.0,0.995,3.49,0.42,10.1 +6.2,0.16,0.47,1.4,0.029,23.0,81.0,0.99,3.26,0.42,12.2 +6.6,0.42,0.33,2.8,0.034,15.0,85.0,0.99,3.28,0.51,13.4 +5.7,0.135,0.3,4.6,0.042,19.0,101.0,0.9946,3.31,0.42,9.3 +5.6,0.49,0.13,4.5,0.039,17.0,116.0,0.9907,3.42,0.9,13.7 +6.9,0.19,0.33,1.6,0.039,27.0,98.0,0.9898,3.09,0.46,12.3 +7.3,0.18,0.29,1.2,0.044,12.0,143.0,0.9918,3.2,0.48,11.3 +7.3,0.25,0.36,13.1,0.05,35.0,200.0,0.9986,3.04,0.46,8.9 +7.3,0.25,0.36,13.1,0.05,35.0,200.0,0.9986,3.04,0.46,8.9 
+7.0,0.2,0.34,5.7,0.035,32.0,83.0,0.9928,3.19,0.46,11.5 +7.3,0.25,0.36,13.1,0.05,35.0,200.0,0.9986,3.04,0.46,8.9 +6.3,0.67,0.48,12.6,0.052,57.0,222.0,0.9979,3.17,0.52,9.3 +7.4,0.4,0.29,5.4,0.044,31.0,122.0,0.994,3.3,0.5,11.1 +7.1,0.26,0.31,2.2,0.044,29.0,128.0,0.9937,3.34,0.64,10.9 +9.0,0.31,0.48,6.6,0.043,11.0,73.0,0.9938,2.9,0.38,11.6 +6.3,0.39,0.24,6.9,0.069,9.0,117.0,0.9942,3.15,0.35,10.2 +8.2,0.22,0.36,6.8,0.034,12.0,90.0,0.9944,3.01,0.38,10.5 +7.1,0.19,0.28,3.6,0.033,16.0,78.0,0.993,2.91,0.78,11.4 +7.3,0.25,0.36,13.1,0.05,35.0,200.0,0.9986,3.04,0.46,8.9 +7.9,0.2,0.34,1.2,0.04,29.0,118.0,0.9932,3.14,0.41,10.6 +7.1,0.26,0.32,5.9,0.037,39.0,97.0,0.9934,3.31,0.4,11.6 +7.0,0.2,0.34,5.7,0.035,32.0,83.0,0.9928,3.19,0.46,11.5 +6.9,0.3,0.33,4.1,0.035,26.0,155.0,0.9925,3.25,0.79,12.3 +8.1,0.29,0.49,7.1,0.042,22.0,124.0,0.9944,3.14,0.41,10.8 +5.8,0.17,0.3,1.4,0.037,55.0,130.0,0.9909,3.29,0.38,11.3 +5.9,0.415,0.02,0.8,0.038,22.0,63.0,0.9932,3.36,0.36,9.3 +6.6,0.23,0.26,1.3,0.045,16.0,128.0,0.9934,3.36,0.6,10.0 +8.6,0.55,0.35,15.55,0.057,35.5,366.5,1.0001,3.04,0.63,11.0 +6.9,0.35,0.74,1.0,0.044,18.0,132.0,0.992,3.13,0.55,10.2 +7.6,0.14,0.74,1.6,0.04,27.0,103.0,0.9916,3.07,0.4,10.8 +9.2,0.28,0.49,11.8,0.042,29.0,137.0,0.998,3.1,0.34,10.1 +6.2,0.18,0.49,4.5,0.047,17.0,90.0,0.9919,3.27,0.37,11.6 +5.3,0.165,0.24,1.1,0.051,25.0,105.0,0.9925,3.32,0.47,9.1 +9.8,0.25,0.74,10.0,0.056,36.0,225.0,0.9977,3.06,0.43,10.0 +8.1,0.29,0.49,7.1,0.042,22.0,124.0,0.9944,3.14,0.41,10.8 +6.8,0.22,0.49,0.9,0.052,26.0,128.0,0.991,3.25,0.35,11.4 +7.2,0.22,0.49,1.0,0.045,34.0,140.0,0.99,3.05,0.34,12.7 +7.4,0.25,0.49,1.1,0.042,35.0,156.0,0.9917,3.13,0.55,11.3 +8.2,0.18,0.49,1.1,0.033,28.0,81.0,0.9923,3.0,0.68,10.4 +6.1,0.22,0.49,1.5,0.051,18.0,87.0,0.9928,3.3,0.46,9.6 +7.0,0.39,0.24,1.0,0.048,8.0,119.0,0.9923,3.0,0.31,10.1 +6.1,0.22,0.49,1.5,0.051,18.0,87.0,0.9928,3.3,0.46,9.6 +6.5,0.36,0.49,2.9,0.03,16.0,94.0,0.9902,3.1,0.49,12.1 +7.1,0.29,0.49,1.2,0.031,32.0,99.0,0.9893,3.07,0.33,12.2 +7.4,0.25,0.49,1.1,0.042,35.0,156.0,0.9917,3.13,0.55,11.3 +6.9,0.23,0.24,14.2,0.053,19.0,94.0,0.9982,3.17,0.5,9.6 +8.5,0.56,0.74,17.85,0.051,51.0,243.0,1.0005,2.99,0.7,9.2 +8.2,0.18,0.49,1.1,0.033,28.0,81.0,0.9923,3.0,0.68,10.4 +6.3,0.23,0.49,7.1,0.05,67.0,210.0,0.9951,3.23,0.34,9.5 +6.1,0.25,0.49,7.6,0.052,67.0,226.0,0.9956,3.16,0.47,8.9 +7.2,0.26,0.74,13.6,0.05,56.0,162.0,0.998,3.03,0.44,8.8 +7.2,0.31,0.24,1.4,0.057,17.0,117.0,0.9928,3.16,0.35,10.5 +8.0,0.25,0.49,1.2,0.061,27.0,117.0,0.9938,3.08,0.34,9.4 +7.0,0.18,0.49,5.3,0.04,34.0,125.0,0.9914,3.24,0.4,12.2 +7.8,0.43,0.49,13.0,0.033,37.0,158.0,0.9955,3.14,0.35,11.3 +8.3,0.2,0.74,4.45,0.044,33.0,130.0,0.9924,3.25,0.42,12.2 +6.3,0.27,0.49,1.2,0.063,35.0,92.0,0.9911,3.38,0.42,12.2 +7.4,0.16,0.49,1.2,0.055,18.0,150.0,0.9917,3.23,0.47,11.2 +7.4,0.16,0.49,1.2,0.055,18.0,150.0,0.9917,3.23,0.47,11.2 +6.9,0.19,0.49,6.6,0.036,49.0,172.0,0.9932,3.2,0.27,11.5 +7.8,0.43,0.49,13.0,0.033,37.0,158.0,0.9955,3.14,0.35,11.3 +7.2,0.4,0.49,1.1,0.048,11.0,138.0,0.9929,3.01,0.42,9.3 +7.8,0.43,0.49,13.0,0.033,37.0,158.0,0.9955,3.14,0.35,11.3 +7.6,0.52,0.49,14.0,0.034,37.0,156.0,0.9958,3.14,0.38,11.8 +8.3,0.21,0.49,19.8,0.054,50.0,231.0,1.0012,2.99,0.54,9.2 +6.9,0.34,0.74,11.2,0.069,44.0,150.0,0.9968,3.0,0.81,9.2 +6.3,0.27,0.49,1.2,0.063,35.0,92.0,0.9911,3.38,0.42,12.2 +8.3,0.2,0.74,4.45,0.044,33.0,130.0,0.9924,3.25,0.42,12.2 +7.1,0.22,0.74,2.7,0.044,42.0,144.0,0.991,3.31,0.41,12.2 +7.9,0.11,0.49,4.5,0.048,27.0,133.0,0.9946,3.24,0.42,10.6 +8.5,0.17,0.74,3.6,0.05,29.0,128.0,0.9928,3.28,0.4,12.4 
+6.4,0.145,0.49,5.4,0.048,54.0,164.0,0.9946,3.56,0.44,10.8 +7.4,0.16,0.49,1.2,0.055,18.0,150.0,0.9917,3.23,0.47,11.2 +8.3,0.19,0.49,1.2,0.051,11.0,137.0,0.9918,3.06,0.46,11.0 +8.0,0.44,0.49,9.1,0.031,46.0,151.0,0.9926,3.16,0.27,12.7 +7.0,0.2,0.74,0.8,0.044,19.0,163.0,0.9931,3.46,0.53,10.2 +6.9,0.19,0.49,6.6,0.036,49.0,172.0,0.9932,3.2,0.27,11.5 +7.1,0.25,0.49,3.0,0.03,30.0,96.0,0.9903,3.13,0.39,12.3 +6.5,0.24,0.24,1.6,0.046,15.0,60.0,0.9928,3.19,0.39,9.8 +7.2,0.4,0.49,1.1,0.048,11.0,138.0,0.9929,3.01,0.42,9.3 +7.6,0.52,0.49,14.0,0.034,37.0,156.0,0.9958,3.14,0.38,11.8 +7.8,0.43,0.49,13.0,0.033,37.0,158.0,0.9955,3.14,0.35,11.3 +7.8,0.21,0.49,1.35,0.052,6.0,48.0,0.9911,3.15,0.28,11.4 +7.0,0.2,0.49,5.9,0.038,39.0,128.0,0.9938,3.21,0.48,10.8 +6.9,0.25,0.24,3.6,0.057,13.0,85.0,0.9942,2.99,0.48,9.5 +7.2,0.08,0.49,1.3,0.05,18.0,148.0,0.9945,3.46,0.44,10.2 +7.1,0.85,0.49,8.7,0.028,40.0,184.0,0.9962,3.22,0.36,10.7 +7.6,0.51,0.24,1.2,0.04,10.0,104.0,0.992,3.05,0.29,10.8 +7.9,0.22,0.24,4.6,0.044,39.0,159.0,0.9927,2.99,0.28,11.5 +7.7,0.16,0.49,2.0,0.056,20.0,124.0,0.9948,3.32,0.49,10.7 +7.2,0.08,0.49,1.3,0.05,18.0,148.0,0.9945,3.46,0.44,10.2 +6.6,0.25,0.24,1.7,0.048,26.0,124.0,0.9942,3.37,0.6,10.1 +6.7,0.16,0.49,2.4,0.046,57.0,187.0,0.9952,3.62,0.81,10.4 +6.9,0.25,0.24,3.6,0.057,13.0,85.0,0.9942,2.99,0.48,9.5 +7.5,0.32,0.24,4.6,0.053,8.0,134.0,0.9958,3.14,0.5,9.1 +7.4,0.28,0.49,1.5,0.034,20.0,126.0,0.9918,2.98,0.39,10.6 +6.2,0.15,0.49,0.9,0.033,17.0,51.0,0.9932,3.3,0.7,9.4 +6.7,0.25,0.74,19.4,0.054,44.0,169.0,1.0004,3.51,0.45,9.8 +6.5,0.26,0.74,13.3,0.044,68.0,224.0,0.9972,3.18,0.54,9.5 +7.9,0.16,0.74,17.85,0.037,52.0,187.0,0.9998,2.99,0.41,9.3 +5.6,0.185,0.49,1.1,0.03,28.0,117.0,0.9918,3.55,0.45,10.3 +7.5,0.2,0.49,1.3,0.031,8.0,97.0,0.9918,3.06,0.62,11.1 +8.0,0.3,0.49,9.4,0.046,47.0,188.0,0.9964,3.14,0.48,10.0 +8.0,0.34,0.49,9.0,0.033,39.0,180.0,0.9936,3.13,0.38,12.3 +7.7,0.35,0.49,8.65,0.033,42.0,186.0,0.9931,3.14,0.38,12.4 +7.6,0.29,0.49,9.6,0.03,45.0,197.0,0.9938,3.13,0.38,12.3 +6.7,0.62,0.24,1.1,0.039,6.0,62.0,0.9934,3.41,0.32,10.4 +6.8,0.27,0.49,1.2,0.044,35.0,126.0,0.99,3.13,0.48,12.1 +7.7,0.27,0.49,1.8,0.041,23.0,86.0,0.9914,3.16,0.42,12.5 +6.7,0.51,0.24,2.1,0.043,14.0,155.0,0.9904,3.22,0.6,13.0 +7.4,0.19,0.49,9.3,0.03,26.0,132.0,0.994,2.99,0.32,11.0 +8.3,0.2,0.49,1.7,0.04,34.0,169.0,0.9938,3.05,0.37,10.1 +6.6,0.3,0.24,1.2,0.034,17.0,121.0,0.9933,3.13,0.36,9.2 +6.8,0.36,0.24,4.6,0.039,24.0,124.0,0.9909,3.27,0.34,12.6 +7.0,0.17,0.74,12.8,0.045,24.0,126.0,0.9942,3.26,0.38,12.2 +9.2,0.18,0.49,1.5,0.041,39.0,130.0,0.9945,3.04,0.49,9.8 +8.1,0.2,0.49,8.1,0.051,51.0,205.0,0.9954,3.1,0.52,11.0 +7.8,0.26,0.74,7.5,0.044,59.0,160.0,0.996,3.22,0.64,10.0 +6.8,0.21,0.49,14.5,0.06,50.0,170.0,0.9991,3.55,0.44,9.8 +7.9,0.2,0.49,1.6,0.053,15.0,144.0,0.993,3.16,0.47,10.5 +8.0,0.18,0.49,1.8,0.061,10.0,145.0,0.9942,3.23,0.48,10.0 +8.8,0.23,0.74,3.2,0.042,15.0,126.0,0.9934,3.02,0.51,11.2 +7.3,0.22,0.49,9.4,0.034,29.0,134.0,0.9939,2.99,0.32,11.0 +7.3,0.22,0.49,9.9,0.031,48.0,161.0,0.9937,3.01,0.28,11.2 +7.4,0.19,0.49,9.3,0.03,26.0,132.0,0.994,2.99,0.32,11.0 +7.3,0.155,0.49,1.3,0.039,34.0,136.0,0.9926,3.14,0.77,10.5 +8.2,0.22,0.49,9.6,0.037,53.0,154.0,0.9951,3.02,0.33,10.6 +8.2,0.24,0.49,9.3,0.038,52.0,163.0,0.9952,3.02,0.33,10.6 +8.4,0.23,0.49,7.8,0.035,22.0,95.0,0.9935,3.04,0.34,12.0 +8.3,0.2,0.49,1.7,0.04,34.0,169.0,0.9938,3.05,0.37,10.1 +8.3,0.2,0.49,1.7,0.038,38.0,167.0,0.9939,3.05,0.37,10.1 +6.6,0.3,0.24,1.2,0.034,17.0,121.0,0.9933,3.13,0.36,9.2 +6.9,0.21,0.49,1.4,0.041,15.0,164.0,0.9927,3.25,0.63,11.0 
+8.0,0.25,0.49,9.0,0.044,31.0,185.0,0.998,3.34,0.49,10.0 +6.6,0.21,0.49,18.15,0.042,41.0,158.0,0.9997,3.28,0.39,8.7 +7.2,0.27,0.74,12.5,0.037,47.0,156.0,0.9981,3.04,0.44,8.7 +14.2,0.27,0.49,1.1,0.037,33.0,156.0,0.992,3.15,0.54,11.1 +7.9,0.28,0.49,7.7,0.045,48.0,195.0,0.9954,3.04,0.55,11.0 +7.4,0.27,0.49,1.1,0.037,33.0,156.0,0.992,3.15,0.54,11.1 +6.6,0.21,0.49,18.15,0.042,41.0,158.0,0.9997,3.28,0.39,8.7 +7.2,0.27,0.74,12.5,0.037,47.0,156.0,0.9981,3.04,0.44,8.7 +8.1,0.3,0.49,8.1,0.037,26.0,174.0,0.9943,3.1,0.3,11.2 +7.5,0.23,0.49,7.7,0.049,61.0,209.0,0.9941,3.14,0.3,11.1 +7.3,0.26,0.49,5.0,0.028,32.0,107.0,0.9936,3.24,0.54,10.8 +7.1,0.18,0.74,15.6,0.044,44.0,176.0,0.9996,3.38,0.67,9.0 +8.5,0.15,0.49,1.5,0.031,17.0,122.0,0.9932,3.03,0.4,10.3 +8.9,0.13,0.49,1.0,0.028,6.0,24.0,0.9926,2.91,0.32,9.9 +8.1,0.28,0.49,1.0,0.04,32.0,148.0,0.9936,3.13,0.41,10.0 +6.0,0.17,0.49,1.0,0.034,26.0,106.0,0.992,3.21,0.42,9.8 +7.3,0.26,0.49,5.0,0.028,32.0,107.0,0.9936,3.24,0.54,10.8 +7.1,0.18,0.74,15.6,0.044,44.0,176.0,0.9996,3.38,0.67,9.0 +7.1,0.53,0.24,0.8,0.029,29.0,86.0,0.993,3.16,0.32,9.1 +7.2,0.16,0.49,1.3,0.037,27.0,104.0,0.9924,3.23,0.57,10.6 +7.3,0.14,0.49,1.1,0.038,28.0,99.0,0.9928,3.2,0.72,10.6 +8.9,0.13,0.49,1.0,0.028,6.0,24.0,0.9926,2.91,0.32,9.9 +7.9,0.12,0.49,5.2,0.049,33.0,152.0,0.9952,3.18,0.47,10.6 +6.7,0.29,0.49,4.7,0.034,35.0,156.0,0.9945,3.13,0.45,9.9 +6.7,0.3,0.49,4.8,0.034,36.0,158.0,0.9945,3.12,0.45,9.9 +7.1,0.36,0.24,1.8,0.025,32.0,102.0,0.9903,3.34,0.59,12.8 +8.5,0.15,0.49,1.5,0.031,17.0,122.0,0.9932,3.03,0.4,10.3 +7.9,0.18,0.49,5.2,0.051,36.0,157.0,0.9953,3.18,0.48,10.6 +6.6,0.19,0.99,1.2,0.122,45.0,129.0,0.9936,3.09,0.31,8.7 +7.3,0.21,0.49,1.8,0.038,44.0,152.0,0.9912,3.32,0.44,12.6 +6.9,0.3,0.49,7.6,0.057,25.0,156.0,0.9962,3.43,0.63,11.0 +7.9,0.42,0.49,8.2,0.056,32.0,164.0,0.9965,3.29,0.6,11.2 +6.9,0.24,0.49,1.3,0.032,35.0,148.0,0.9932,3.45,0.57,10.7 +7.6,0.23,0.49,10.0,0.036,45.0,182.0,0.9967,3.08,0.58,9.6 +7.9,0.18,0.49,5.2,0.051,36.0,157.0,0.9953,3.18,0.48,10.6 +6.2,0.43,0.49,6.4,0.045,12.0,115.0,0.9963,3.27,0.57,9.0 +8.8,0.35,0.49,1.0,0.036,14.0,56.0,0.992,2.96,0.33,10.5 +7.8,0.3,0.74,1.8,0.033,33.0,156.0,0.991,3.29,0.52,12.8 +9.1,0.28,0.49,2.0,0.059,10.0,112.0,0.9958,3.15,0.46,10.1 +7.1,0.34,0.49,1.5,0.027,26.0,126.0,0.99,3.3,0.33,12.2 +7.8,0.3,0.74,1.8,0.033,33.0,156.0,0.991,3.29,0.52,12.8 +9.1,0.28,0.49,2.0,0.059,10.0,112.0,0.9958,3.15,0.46,10.1 +8.5,0.19,0.49,3.5,0.044,29.0,117.0,0.9938,3.14,0.51,10.1 +7.6,0.18,0.49,18.05,0.046,36.0,158.0,0.9996,3.06,0.41,9.2 +7.5,0.19,0.49,1.8,0.055,19.0,110.0,0.9946,3.33,0.44,9.9 +7.4,0.3,0.49,8.2,0.055,49.0,188.0,0.9974,3.52,0.58,9.7 +6.7,0.3,0.74,5.0,0.038,35.0,157.0,0.9945,3.21,0.46,9.9 +6.6,0.3,0.74,4.6,0.041,36.0,159.0,0.9946,3.21,0.45,9.9 +7.4,0.3,0.49,8.2,0.055,49.0,188.0,0.9974,3.52,0.58,9.7 +6.9,0.22,0.49,7.0,0.063,50.0,168.0,0.9957,3.54,0.5,10.3 +7.8,0.26,0.49,3.1,0.045,21.0,116.0,0.9931,3.16,0.35,10.3 +8.5,0.17,0.49,8.8,0.048,23.0,108.0,0.9947,2.88,0.34,10.5 +6.8,0.17,0.74,2.4,0.053,61.0,182.0,0.9953,3.63,0.76,10.5 +6.2,0.27,0.49,1.4,0.05,20.0,74.0,0.9931,3.32,0.44,9.8 +7.1,0.64,0.49,1.8,0.05,17.0,128.0,0.9946,3.31,0.58,10.6 +6.4,0.18,0.74,11.9,0.046,54.0,168.0,0.9978,3.58,0.68,10.1 +7.6,0.31,0.49,13.4,0.062,50.0,191.0,0.9989,3.22,0.53,9.0 +9.8,0.31,0.49,15.4,0.046,13.0,119.0,1.0004,3.18,0.45,9.5 +9.0,0.3,0.49,7.2,0.039,32.0,84.0,0.9938,2.94,0.32,11.5 +8.4,0.24,0.49,7.4,0.039,46.0,108.0,0.9934,3.03,0.33,11.9 +6.4,0.18,0.74,11.9,0.046,54.0,168.0,0.9978,3.58,0.68,10.1 +6.4,0.25,0.74,7.8,0.045,52.0,209.0,0.9956,3.21,0.42,9.2 
+7.3,0.3,0.74,13.5,0.039,46.0,165.0,0.9982,3.02,0.4,8.7 +9.3,0.31,0.49,1.3,0.042,34.0,147.0,0.9948,3.11,0.46,9.8 +6.4,0.25,0.74,7.8,0.045,52.0,209.0,0.9956,3.21,0.42,9.2 +7.3,0.3,0.74,13.5,0.039,46.0,165.0,0.9982,3.02,0.4,8.7 +7.0,0.27,0.74,1.5,0.036,27.0,122.0,0.9926,3.35,0.48,11.2 +7.9,0.14,0.74,1.2,0.028,30.0,165.0,0.991,3.08,0.82,12.3 +6.4,0.12,0.49,6.4,0.042,49.0,161.0,0.9945,3.34,0.44,10.4 +6.8,0.21,0.74,1.2,0.047,25.0,111.0,0.9916,3.13,0.41,10.7 +8.6,0.16,0.49,7.3,0.043,9.0,63.0,0.9953,3.13,0.59,10.5 +7.0,0.29,0.49,3.8,0.047,37.0,136.0,0.9938,2.95,0.4,9.4 +6.4,0.27,0.49,7.3,0.046,53.0,206.0,0.9956,3.24,0.43,9.2 +6.6,0.55,0.01,2.7,0.034,56.0,122.0,0.9906,3.15,0.3,11.9 +6.4,0.27,0.49,7.3,0.046,53.0,206.0,0.9956,3.24,0.43,9.2 +6.3,0.24,0.74,1.4,0.172,24.0,108.0,0.9932,3.27,0.39,9.9 +6.7,0.33,0.49,1.6,0.167,20.0,94.0,0.9914,3.11,0.5,11.4 +7.0,0.29,0.49,3.8,0.047,37.0,136.0,0.9938,2.95,0.4,9.4 +8.2,0.34,0.49,8.0,0.046,55.0,223.0,0.996,3.08,0.52,10.7 +5.6,0.39,0.24,4.7,0.034,27.0,77.0,0.9906,3.28,0.36,12.7 +5.6,0.41,0.24,1.9,0.034,10.0,53.0,0.98815,3.32,0.5,13.5 +6.7,0.41,0.01,2.8,0.048,39.0,137.0,0.9942,3.24,0.35,9.5 +7.1,0.26,0.49,2.2,0.032,31.0,113.0,0.9903,3.37,0.42,12.9 +7.5,0.32,0.49,1.7,0.031,44.0,109.0,0.9906,3.07,0.46,12.5 +5.8,0.19,0.49,4.9,0.04,44.0,118.0,0.9935,3.34,0.38,9.5 +6.9,0.27,0.49,23.5,0.057,59.0,235.0,1.0024,2.98,0.47,8.6 +8.1,0.2,0.49,11.8,0.048,46.0,212.0,0.9968,3.09,0.46,10.0 +7.5,0.32,0.49,1.7,0.031,44.0,109.0,0.9906,3.07,0.46,12.5 +8.2,0.26,0.49,5.2,0.04,19.0,100.0,0.9941,3.12,0.34,10.1 +7.8,0.26,0.49,3.2,0.027,28.0,87.0,0.9919,3.03,0.32,11.3 +8.0,0.14,0.49,1.5,0.035,42.0,120.0,0.9928,3.26,0.4,10.6 +8.0,0.29,0.49,11.7,0.035,40.0,131.0,0.9958,3.14,0.34,10.8 +7.5,0.19,0.49,1.6,0.047,42.0,140.0,0.9932,3.4,0.47,10.7 +6.9,0.34,0.49,7.3,0.045,61.0,206.0,0.9957,3.09,0.4,9.0 +6.2,0.2,0.49,1.6,0.065,17.0,143.0,0.9937,3.22,0.52,9.2 +6.4,0.37,0.49,13.3,0.045,53.0,243.0,0.9982,3.14,0.48,8.5 +6.2,0.22,0.49,6.0,0.029,31.0,128.0,0.9928,3.41,0.36,11.3 +7.8,0.26,0.49,3.2,0.027,28.0,87.0,0.9919,3.03,0.32,11.3 +8.9,0.32,0.49,1.6,0.05,17.0,131.0,0.9956,3.13,0.34,9.4 +6.5,0.44,0.49,7.7,0.045,16.0,169.0,0.9957,3.11,0.37,8.7 +7.0,0.14,0.49,5.9,0.053,22.0,118.0,0.9954,3.36,0.36,9.4 +9.0,0.17,0.49,1.0,0.039,46.0,131.0,0.993,3.09,0.51,10.5 +6.4,0.26,0.49,6.4,0.037,37.0,161.0,0.9954,3.38,0.53,9.7 +9.0,0.22,0.49,10.4,0.048,52.0,195.0,0.9987,3.31,0.44,10.2 +8.9,0.32,0.49,1.6,0.05,17.0,131.0,0.9956,3.13,0.34,9.4 +8.2,0.2,0.49,3.5,0.057,14.0,108.0,0.9928,3.19,0.35,11.5 +7.8,0.15,0.24,7.7,0.047,21.0,98.0,0.9951,2.94,0.31,9.6 +6.9,0.25,0.24,1.8,0.053,6.0,121.0,0.993,3.23,0.7,11.4 +8.2,0.2,0.49,3.5,0.057,14.0,108.0,0.9928,3.19,0.35,11.5 +7.1,0.28,0.49,6.5,0.041,28.0,111.0,0.9926,3.41,0.58,12.2 +7.4,0.19,0.49,6.7,0.037,15.0,110.0,0.9938,3.2,0.38,11.0 +8.3,0.25,0.49,16.8,0.048,50.0,228.0,1.0001,3.03,0.52,9.2 +7.5,0.14,0.74,1.6,0.035,21.0,126.0,0.9933,3.26,0.45,10.2 +7.8,0.49,0.49,7.0,0.043,29.0,149.0,0.9952,3.21,0.33,10.0 +8.1,0.12,0.49,1.2,0.042,43.0,160.0,0.9934,3.13,0.48,9.7 +7.6,0.47,0.49,13.0,0.239,42.0,220.0,0.9988,2.96,0.51,9.2 +7.9,0.22,0.49,3.8,0.042,26.0,105.0,0.993,3.1,0.39,10.5 +7.8,0.49,0.49,7.0,0.043,29.0,149.0,0.9952,3.21,0.33,10.0 +6.4,0.22,0.49,7.5,0.054,42.0,151.0,0.9948,3.27,0.52,10.1 +7.3,0.19,0.49,15.55,0.058,50.0,134.0,0.9998,3.42,0.36,9.1 +8.1,0.3,0.49,12.3,0.049,50.0,144.0,0.9971,3.09,0.57,10.2 +7.3,0.19,0.49,15.55,0.058,50.0,134.0,0.9998,3.42,0.36,9.1 +7.5,0.24,0.49,9.4,0.048,50.0,149.0,0.9962,3.17,0.59,10.5 +6.4,0.22,0.49,7.5,0.054,42.0,151.0,0.9948,3.27,0.52,10.1 
+7.8,0.21,0.49,1.2,0.036,20.0,99.0,0.99,3.05,0.28,12.1 +7.1,0.3,0.49,1.6,0.045,31.0,100.0,0.9942,3.4,0.59,10.2 +6.9,0.26,0.49,1.6,0.058,39.0,166.0,0.9965,3.65,0.52,9.4 +7.6,0.31,0.49,3.95,0.044,27.0,131.0,0.9912,3.08,0.67,12.8 +6.4,0.42,0.74,12.8,0.076,48.0,209.0,0.9978,3.12,0.58,9.0 +8.2,0.29,0.49,1.0,0.044,29.0,118.0,0.9928,3.24,0.36,10.9 +7.9,0.33,0.28,31.6,0.053,35.0,176.0,1.0103,3.15,0.38,8.8 +6.6,0.46,0.49,7.4,0.052,19.0,184.0,0.9956,3.11,0.38,9.0 +7.8,0.28,0.49,1.3,0.046,27.0,142.0,0.9936,3.09,0.59,10.2 +5.8,0.15,0.49,1.1,0.048,21.0,98.0,0.9929,3.19,0.48,9.2 +7.8,0.4,0.49,7.8,0.06,34.0,162.0,0.9966,3.26,0.58,11.3 +6.6,0.31,0.49,7.7,0.05,52.0,220.0,0.9964,3.12,0.45,8.8 +6.6,0.325,0.49,7.7,0.049,53.0,217.0,0.996,3.16,0.4,9.3 +6.6,0.27,0.49,7.8,0.049,62.0,217.0,0.9959,3.17,0.45,9.4 +6.7,0.26,0.49,8.3,0.047,54.0,191.0,0.9954,3.23,0.4,10.3 +6.7,0.21,0.49,1.4,0.047,30.0,114.0,0.9914,2.92,0.42,10.8 +7.9,0.33,0.28,31.6,0.053,35.0,176.0,1.0103,3.15,0.38,8.8 +8.1,0.28,0.46,15.4,0.059,32.0,177.0,1.0004,3.27,0.58,9.0 +6.5,0.13,0.37,1.0,0.036,48.0,114.0,0.9911,3.41,0.51,11.5 +7.8,0.445,0.56,1.0,0.04,8.0,84.0,0.9938,3.25,0.43,10.8 +8.8,0.39,0.34,5.9,0.055,33.0,128.0,0.9927,2.95,0.51,11.8 +7.9,0.18,0.33,1.2,0.033,20.0,72.0,0.9922,3.12,0.38,10.5 +7.1,0.31,0.38,1.2,0.036,10.0,124.0,0.9924,3.14,0.44,9.9 +7.8,0.24,0.18,6.7,0.046,33.0,160.0,0.9963,3.2,0.56,9.8 +7.0,0.35,0.3,6.5,0.028,27.0,87.0,0.9936,3.4,0.42,11.4 +6.6,0.26,0.31,4.8,0.138,41.0,168.0,0.9951,3.2,0.38,9.3 +6.6,0.27,0.31,5.3,0.137,35.0,163.0,0.9951,3.2,0.38,9.3 +6.8,0.22,0.29,8.9,0.046,82.0,188.0,0.9955,3.3,0.44,10.3 +6.2,0.27,0.32,8.8,0.047,65.0,224.0,0.9961,3.17,0.47,8.9 +7.0,0.35,0.3,6.5,0.028,27.0,87.0,0.9936,3.4,0.42,11.4 +7.3,0.23,0.37,1.8,0.032,60.0,156.0,0.992,3.11,0.35,11.1 +6.2,0.3,0.2,6.6,0.045,42.0,170.0,0.9944,3.36,0.45,10.4 +6.4,0.35,0.2,5.7,0.034,18.0,117.0,0.9944,3.33,0.43,10.1 +7.6,0.32,0.34,18.35,0.054,44.0,197.0,1.0008,3.22,0.55,9.0 +6.3,0.31,0.3,10.0,0.046,49.0,212.0,0.9962,3.74,0.55,11.9 +7.2,0.25,0.28,14.4,0.055,55.0,205.0,0.9986,3.12,0.38,9.0 +7.2,0.25,0.28,14.4,0.055,55.0,205.0,0.9986,3.12,0.38,9.0 +7.3,0.26,0.33,17.85,0.049,41.5,195.0,1.0,3.06,0.44,9.1 +7.2,0.25,0.28,14.4,0.055,55.0,205.0,0.9986,3.12,0.38,9.0 +7.4,0.26,0.37,9.4,0.047,42.0,147.0,0.9982,3.46,0.72,10.0 +7.3,0.26,0.33,17.85,0.049,41.5,195.0,1.0,3.06,0.44,9.1 +6.7,0.25,0.26,1.55,0.041,118.5,216.0,0.9949,3.55,0.63,9.4 +7.1,0.16,0.25,1.3,0.034,28.0,123.0,0.9915,3.27,0.55,11.4 +9.0,0.43,0.3,1.5,0.05,7.0,175.0,0.9951,3.11,0.45,9.7 +7.2,0.25,0.28,14.4,0.055,55.0,205.0,0.9986,3.12,0.38,9.0 +7.0,0.24,0.3,4.2,0.04,41.0,213.0,0.9927,3.28,0.49,11.8 +6.7,0.265,0.22,8.6,0.048,54.0,198.0,0.9955,3.25,0.41,10.2 +7.7,0.12,0.32,1.4,0.06,47.0,150.0,0.9952,3.37,0.42,9.2 +7.2,0.21,0.33,3.0,0.036,35.0,132.0,0.9928,3.25,0.4,11.0 +8.5,0.32,0.36,14.9,0.041,47.0,190.0,0.9982,3.08,0.31,10.0 +6.9,0.18,0.3,2.0,0.038,39.0,190.0,0.9914,3.32,0.37,12.2 +7.0,0.24,0.3,4.2,0.04,41.0,213.0,0.9927,3.28,0.49,11.8 +6.3,0.26,0.29,2.2,0.043,35.0,175.0,0.9918,3.38,0.43,11.6 +6.7,0.26,0.3,1.8,0.043,25.0,121.0,0.9944,3.44,0.61,10.2 +7.9,0.29,0.36,11.1,0.033,43.0,208.0,0.9969,3.14,0.46,10.3 +6.5,0.27,0.19,4.2,0.046,6.0,114.0,0.9955,3.25,0.35,8.6 +6.7,0.33,0.42,6.4,0.058,27.0,151.0,0.9954,3.16,0.44,9.6 +6.7,0.31,0.42,6.4,0.057,25.0,148.0,0.9955,3.16,0.45,9.6 +6.6,0.25,0.31,1.5,0.035,32.0,127.0,0.9921,3.41,0.47,11.3 +6.4,0.24,0.22,1.5,0.038,38.0,157.0,0.9934,3.41,0.55,9.9 +6.8,0.26,0.29,16.95,0.056,48.0,179.0,0.9998,3.45,0.4,9.6 +7.0,0.61,0.26,1.7,0.051,25.0,161.0,0.9946,3.36,0.6,10.6 
+6.8,0.22,0.3,13.6,0.055,50.0,180.0,0.9984,3.44,0.39,9.8 +8.1,0.31,0.24,1.6,0.032,10.0,67.0,0.9924,3.08,0.47,10.5 +7.0,0.2,0.3,6.1,0.037,31.0,120.0,0.9939,3.24,0.51,10.8 +7.9,0.18,0.37,3.0,0.061,25.0,178.0,0.995,3.22,0.51,10.0 +6.6,0.34,0.27,6.2,0.059,23.0,136.0,0.9957,3.3,0.49,10.1 +6.8,0.3,0.24,6.6,0.123,35.0,116.0,0.9953,3.07,0.48,9.4 +6.5,0.18,0.34,1.6,0.04,43.0,148.0,0.9912,3.32,0.59,11.5 +7.0,0.21,0.31,6.0,0.046,29.0,108.0,0.9939,3.26,0.5,10.8 +6.8,0.27,0.32,1.5,0.044,19.0,142.0,0.9921,3.1,0.43,9.9 +9.3,0.2,0.33,1.7,0.05,28.0,178.0,0.9954,3.16,0.43,9.0 +5.8,0.23,0.27,1.8,0.043,24.0,69.0,0.9933,3.38,0.31,9.4 +7.6,0.2,0.39,2.6,0.044,30.0,180.0,0.9941,3.46,0.44,10.8 +8.2,0.15,0.48,2.7,0.052,24.0,190.0,0.995,3.5,0.45,10.9 +7.5,0.4,1.0,19.5,0.041,33.0,148.0,0.9977,3.24,0.38,12.0 +6.5,0.18,0.34,1.6,0.04,43.0,148.0,0.9912,3.32,0.59,11.5 +7.0,0.13,0.3,5.0,0.056,31.0,122.0,0.9945,3.47,0.42,10.5 +6.9,0.17,0.22,4.6,0.064,55.0,152.0,0.9952,3.29,0.37,9.3 +7.0,0.3,0.32,6.4,0.034,28.0,97.0,0.9924,3.23,0.44,11.8 +7.6,0.445,0.44,14.5,0.045,68.0,212.0,0.9986,3.48,0.36,10.0 +6.8,0.3,0.24,6.6,0.123,35.0,116.0,0.9953,3.07,0.48,9.4 +7.5,0.22,0.33,6.7,0.036,45.0,138.0,0.9939,3.2,0.68,11.4 +9.2,0.23,0.3,1.1,0.031,40.0,99.0,0.9929,2.94,0.3,10.4 +8.7,0.34,0.46,13.8,0.055,68.0,198.0,0.9988,3.36,0.37,9.5 +6.6,0.545,0.04,2.5,0.031,48.0,111.0,0.9906,3.14,0.32,11.9 +8.1,0.3,0.31,1.1,0.041,49.0,123.0,0.9914,2.99,0.45,11.1 +6.9,0.16,0.3,9.6,0.057,50.0,185.0,0.9978,3.39,0.38,9.6 +8.0,0.32,0.36,4.6,0.042,56.0,178.0,0.9928,3.29,0.47,12.0 +6.1,0.22,0.23,3.1,0.052,15.0,104.0,0.9948,3.14,0.42,8.7 +6.9,0.16,0.3,9.6,0.057,50.0,185.0,0.9978,3.39,0.38,9.6 +7.5,0.15,0.38,1.8,0.054,19.0,101.0,0.9946,3.24,0.44,10.0 +8.4,0.29,0.29,1.05,0.032,4.0,55.0,0.9908,2.91,0.32,11.4 +6.6,0.37,0.47,6.5,0.061,23.0,150.0,0.9954,3.14,0.45,9.6 +7.7,0.38,0.4,2.0,0.038,28.0,152.0,0.9906,3.18,0.32,12.9 +6.3,0.25,0.23,14.9,0.039,47.0,142.0,0.99705,3.14,0.35,9.7 +8.3,0.3,0.36,10.0,0.042,33.0,169.0,0.9982,3.23,0.51,9.3 +6.6,0.22,0.58,1.1,0.133,52.0,136.0,0.9932,3.1,0.3,9.1 +6.1,0.34,0.31,12.0,0.053,46.0,238.0,0.9977,3.16,0.48,8.6 +7.5,0.22,0.29,4.8,0.05,33.0,87.0,0.994,3.14,0.42,9.9 +8.3,0.3,0.36,10.0,0.042,33.0,169.0,0.9982,3.23,0.51,9.3 +8.0,0.27,0.24,1.2,0.044,20.0,102.0,0.9929,3.28,0.42,10.9 +6.1,0.17,0.27,1.5,0.056,45.0,135.0,0.9924,3.2,0.43,10.2 +7.4,0.18,0.3,10.4,0.045,44.0,174.0,0.9966,3.11,0.57,9.7 +6.7,0.16,0.28,2.5,0.046,40.0,153.0,0.9921,3.38,0.51,11.4 +6.1,0.255,0.44,12.3,0.045,53.0,197.0,0.9967,3.24,0.54,9.5 +7.4,0.23,0.25,1.4,0.049,43.0,141.0,0.9934,3.42,0.54,10.2 +6.4,0.16,0.28,2.2,0.042,33.0,93.0,0.9914,3.31,0.43,11.1 +6.3,0.25,0.23,14.9,0.039,47.0,142.0,0.99705,3.14,0.35,9.7 +6.7,0.27,0.25,8.0,0.053,54.0,202.0,0.9961,3.22,0.43,9.3 +6.9,0.29,0.23,8.6,0.056,56.0,215.0,0.9967,3.17,0.44,8.8 +9.6,0.21,0.28,1.2,0.038,12.0,53.0,0.9926,2.8,0.46,10.6 +6.6,0.62,0.2,8.7,0.046,81.0,224.0,0.99605,3.17,0.44,9.3 +6.4,0.28,0.19,5.4,0.042,67.0,181.0,0.99435,3.31,0.35,10.2 +8.0,0.3,0.28,5.7,0.044,31.0,124.0,0.9948,3.16,0.51,10.2 +6.4,0.17,0.27,1.5,0.037,20.0,98.0,0.9916,3.46,0.42,11.0 +7.3,0.21,0.3,10.9,0.037,18.0,112.0,0.997,3.4,0.5,9.6 +6.7,0.27,0.25,8.0,0.053,54.0,202.0,0.9961,3.22,0.43,9.3 +6.9,0.29,0.23,8.6,0.056,56.0,215.0,0.9967,3.17,0.44,8.8 +6.6,0.32,0.26,7.7,0.054,56.0,209.0,0.9961,3.17,0.45,8.8 +7.4,0.32,0.22,1.7,0.051,50.0,179.0,0.9955,3.28,0.69,8.9 +6.6,0.37,0.07,1.4,0.048,58.0,144.0,0.9922,3.17,0.38,10.0 +7.7,0.43,0.28,4.5,0.046,33.0,102.0,0.9918,3.16,0.56,12.2 +7.8,0.39,0.26,9.9,0.059,33.0,181.0,0.9955,3.04,0.42,10.9 
+6.5,0.18,0.26,1.4,0.041,40.0,141.0,0.9941,3.34,0.72,9.5 +7.8,0.4,0.26,9.5,0.059,32.0,178.0,0.9955,3.04,0.43,10.9 +7.8,0.39,0.26,9.9,0.059,33.0,181.0,0.9955,3.04,0.42,10.9 +6.9,0.19,0.28,3.0,0.054,33.0,99.0,0.9924,3.16,0.4,10.8 +7.7,0.49,1.0,19.6,0.03,28.0,135.0,0.9973,3.24,0.4,12.0 +6.6,0.25,0.35,14.0,0.069,42.0,163.0,0.999,3.56,0.47,9.8 +6.5,0.18,0.26,1.4,0.041,40.0,141.0,0.9941,3.34,0.72,9.5 +6.4,0.15,0.36,1.8,0.034,43.0,150.0,0.9922,3.42,0.69,11.0 +6.4,0.15,0.36,1.8,0.034,43.0,150.0,0.9922,3.42,0.69,11.0 +8.4,0.17,0.31,5.4,0.052,47.0,150.0,0.9953,3.24,0.38,9.8 +6.1,0.32,0.37,1.8,0.051,13.0,200.0,0.9945,3.49,0.44,10.5 +8.5,0.21,0.26,9.25,0.034,73.0,142.0,0.9945,3.05,0.37,11.4 +8.7,0.45,0.4,1.5,0.067,17.0,100.0,0.9957,3.27,0.57,10.1 +6.7,0.24,0.29,6.8,0.038,54.0,127.0,0.9932,3.33,0.46,11.6 +8.5,0.21,0.26,9.25,0.034,73.0,142.0,0.9945,3.05,0.37,11.4 +7.4,0.33,0.26,2.6,0.04,29.0,115.0,0.9913,3.07,0.52,11.8 +7.2,0.26,0.3,2.1,0.033,50.0,158.0,0.9909,3.33,0.43,12.1 +8.2,0.36,0.29,7.6,0.035,37.0,122.0,0.9939,3.16,0.34,12.0 +7.8,0.2,0.24,1.6,0.026,26.0,189.0,0.991,3.08,0.74,12.1 +9.4,0.16,0.3,1.4,0.042,26.0,176.0,0.9954,3.15,0.46,9.1 +6.4,0.33,0.24,1.6,0.054,25.0,117.0,0.9943,3.36,0.5,9.3 +7.8,0.22,0.36,1.4,0.056,21.0,153.0,0.993,3.2,0.53,10.4 +7.4,0.35,0.31,17.95,0.062,42.0,187.0,1.0002,3.27,0.64,9.1 +6.6,0.37,0.24,2.0,0.064,23.0,120.0,0.9946,3.32,0.54,9.4 +6.7,0.37,0.41,6.3,0.061,22.0,149.0,0.9953,3.16,0.47,9.6 +7.1,0.37,0.32,1.4,0.037,27.0,126.0,0.9918,3.19,0.62,12.0 +6.9,0.25,0.27,9.05,0.039,37.0,128.0,0.9936,3.27,0.34,11.3 +6.8,0.23,0.29,15.4,0.073,56.0,173.0,0.9984,3.06,0.41,8.7 +6.4,0.26,0.21,7.1,0.04,35.0,162.0,0.9956,3.39,0.58,9.9 +7.6,0.3,0.22,10.2,0.049,57.0,191.0,0.9966,3.08,0.4,9.3 +9.4,0.16,0.23,1.6,0.042,14.0,67.0,0.9942,3.07,0.32,9.5 +6.8,0.23,0.29,15.4,0.073,56.0,173.0,0.9984,3.06,0.41,8.7 +6.4,0.26,0.21,7.1,0.04,35.0,162.0,0.9956,3.39,0.58,9.9 +7.6,0.3,0.22,10.2,0.049,57.0,191.0,0.9966,3.08,0.4,9.3 +7.5,0.33,0.39,12.4,0.065,29.0,119.0,0.9974,3.16,0.39,9.4 +7.6,0.38,0.2,3.4,0.046,9.0,116.0,0.9944,3.15,0.41,9.4 +8.8,0.2,0.43,15.0,0.053,60.0,184.0,1.0008,3.28,0.79,8.8 +7.5,0.33,0.39,12.4,0.065,29.0,119.0,0.9974,3.16,0.39,9.4 +8.8,0.2,0.43,15.0,0.053,60.0,184.0,1.0008,3.28,0.79,8.8 +6.6,0.36,0.21,1.5,0.049,39.0,184.0,0.9928,3.18,0.41,9.9 +7.6,0.38,0.2,3.4,0.046,9.0,116.0,0.9944,3.15,0.41,9.4 +5.6,0.46,0.24,4.8,0.042,24.0,72.0,0.9908,3.29,0.37,12.6 +7.2,0.15,0.38,1.2,0.038,18.0,110.0,0.9917,3.19,0.43,11.1 +8.2,0.42,0.29,4.1,0.03,31.0,100.0,0.9911,3.0,0.32,12.8 +6.8,0.3,0.35,2.8,0.038,10.0,164.0,0.9912,3.09,0.53,12.0 +6.7,0.27,0.3,13.9,0.029,34.0,131.0,0.9953,3.36,0.5,12.0 +7.2,0.5,0.0,0.8,0.034,46.0,114.0,0.9932,3.19,0.34,9.2 +6.0,0.26,0.29,1.0,0.032,27.0,96.0,0.9896,3.38,0.44,12.3 +6.8,0.33,0.28,1.2,0.032,38.0,131.0,0.9889,3.19,0.41,13.0 +6.8,0.3,0.35,2.8,0.038,10.0,164.0,0.9912,3.09,0.53,12.0 +7.4,0.29,0.31,1.7,0.035,23.0,110.0,0.9926,3.07,0.38,10.9 +8.2,0.42,0.29,4.1,0.03,31.0,100.0,0.9911,3.0,0.32,12.8 +7.3,0.19,0.24,6.3,0.054,34.0,231.0,0.9964,3.36,0.54,10.0 +6.5,0.32,0.12,11.5,0.033,35.0,165.0,0.9974,3.22,0.32,9.0 +7.1,0.32,0.4,1.5,0.034,13.0,84.0,0.9944,3.42,0.6,10.4 +6.5,0.32,0.12,11.5,0.033,35.0,165.0,0.9974,3.22,0.32,9.0 +7.3,0.19,0.24,6.3,0.054,34.0,231.0,0.9964,3.36,0.54,10.0 +7.3,0.17,0.23,6.3,0.051,35.0,240.0,0.9963,3.36,0.54,10.0 +7.7,0.44,0.24,11.2,0.031,41.0,167.0,0.9948,3.12,0.43,11.3 +7.7,0.44,0.24,11.2,0.031,41.0,167.0,0.9948,3.12,0.43,11.3 +7.4,0.49,0.24,15.1,0.03,34.0,153.0,0.9953,3.13,0.51,12.0 +7.7,0.44,0.24,11.2,0.031,41.0,167.0,0.9948,3.12,0.43,11.3 
+7.4,0.49,0.24,15.1,0.03,34.0,153.0,0.9953,3.13,0.51,12.0 +6.4,0.21,0.3,5.6,0.044,43.0,160.0,0.9949,3.6,0.41,10.6 +8.0,0.55,0.42,12.6,0.211,37.0,213.0,0.9988,2.99,0.56,9.3 +7.0,0.19,0.23,5.7,0.123,27.0,104.0,0.9954,3.04,0.54,9.4 +7.2,0.24,0.29,2.2,0.037,37.0,102.0,0.992,3.27,0.64,11.0 +6.5,0.34,0.36,11.0,0.052,53.0,247.0,0.9984,3.44,0.55,9.3 +7.0,0.19,0.23,5.7,0.123,27.0,104.0,0.9954,3.04,0.54,9.4 +6.9,0.18,0.33,1.0,0.054,24.0,164.0,0.9926,3.42,0.51,10.5 +7.2,0.24,0.29,2.2,0.037,37.0,102.0,0.992,3.27,0.64,11.0 +8.2,0.18,0.31,11.8,0.039,96.0,249.0,0.9976,3.07,0.52,9.5 +8.3,0.28,0.45,7.8,0.059,32.0,139.0,0.9972,3.33,0.77,11.2 +6.1,0.34,0.46,4.7,0.029,21.0,94.0,0.991,3.29,0.62,12.3 +7.4,0.44,0.2,11.5,0.049,44.0,157.0,0.998,3.27,0.44,9.0 +7.6,0.26,0.58,7.9,0.041,62.0,180.0,0.9966,3.07,0.38,9.0 +7.4,0.44,0.2,11.5,0.049,44.0,157.0,0.998,3.27,0.44,9.0 +8.7,0.49,0.57,17.8,0.052,34.0,243.0,1.0007,2.98,0.82,9.0 +7.0,0.24,0.25,1.7,0.042,48.0,189.0,0.992,3.25,0.42,11.4 +7.1,0.25,0.25,1.6,0.046,50.0,181.0,0.9925,3.2,0.42,11.0 +6.1,0.34,0.46,4.7,0.029,21.0,94.0,0.991,3.29,0.62,12.3 +6.4,0.18,0.31,1.6,0.049,36.0,127.0,0.9934,3.6,0.67,10.4 +8.3,0.27,0.39,2.4,0.058,16.0,107.0,0.9955,3.28,0.59,10.3 +6.8,0.24,0.35,6.4,0.048,44.0,172.0,0.9944,3.29,0.55,10.5 +8.0,0.22,0.28,14.0,0.053,83.0,197.0,0.9981,3.14,0.45,9.8 +10.0,0.91,0.42,1.6,0.056,34.0,181.0,0.9968,3.11,0.46,10.0 +8.9,0.34,0.34,1.6,0.056,13.0,176.0,0.9946,3.14,0.47,9.7 +8.9,0.33,0.34,1.4,0.056,14.0,171.0,0.9946,3.13,0.47,9.7 +8.0,0.22,0.28,14.0,0.053,83.0,197.0,0.9981,3.14,0.45,9.8 +6.7,0.18,0.19,4.7,0.046,57.0,161.0,0.9946,3.32,0.66,10.5 +7.8,0.2,0.28,10.2,0.054,78.0,186.0,0.997,3.14,0.46,10.0 +7.3,0.13,0.31,2.3,0.054,22.0,104.0,0.9924,3.24,0.92,11.5 +6.6,0.28,0.3,7.8,0.049,57.0,202.0,0.9958,3.24,0.39,9.5 +7.1,0.25,0.3,2.4,0.042,25.0,122.0,0.994,3.43,0.61,10.5 +7.6,0.36,0.44,8.3,0.255,28.0,142.0,0.9958,3.12,0.43,10.2 +7.6,0.27,0.25,13.9,0.05,45.0,199.0,0.9984,3.34,0.5,9.8 +6.9,0.37,0.28,13.8,0.031,34.0,137.0,0.9948,3.1,0.37,11.6 +7.4,0.21,0.27,7.3,0.031,41.0,144.0,0.9932,3.15,0.38,11.8 +8.2,0.18,0.28,8.5,0.035,41.0,140.0,0.9952,3.04,0.37,10.1 +6.3,0.19,0.21,1.8,0.049,35.0,163.0,0.9924,3.31,0.5,10.3 +7.0,0.21,0.22,5.1,0.048,38.0,168.0,0.9945,3.34,0.49,10.4 +5.8,0.33,0.2,16.05,0.047,26.0,166.0,0.9976,3.09,0.46,8.9 +5.8,0.33,0.2,16.05,0.047,26.0,166.0,0.9976,3.09,0.46,8.9 +7.9,0.29,0.31,7.35,0.034,37.0,154.0,0.9938,3.06,0.31,10.8 +6.6,0.31,0.38,16.05,0.058,16.0,165.0,0.9997,3.38,0.6,9.2 +8.0,0.19,0.3,2.0,0.053,48.0,140.0,0.994,3.18,0.49,9.6 +8.0,0.2,0.36,1.2,0.032,21.0,78.0,0.9921,3.08,0.37,10.4 +8.0,0.25,0.26,14.0,0.043,41.0,248.0,0.9986,3.03,0.57,8.7 +7.2,0.2,0.61,16.2,0.043,14.0,103.0,0.9987,3.06,0.36,9.2 +7.7,0.3,0.42,14.3,0.045,45.0,213.0,0.9991,3.18,0.63,9.2 +7.2,0.2,0.61,16.2,0.043,14.0,103.0,0.9987,3.06,0.36,9.2 +7.7,0.3,0.42,14.3,0.045,45.0,213.0,0.9991,3.18,0.63,9.2 +7.7,0.3,0.42,14.3,0.045,45.0,213.0,0.9991,3.18,0.63,9.2 +6.4,0.22,0.32,7.9,0.029,34.0,124.0,0.9948,3.4,0.39,10.2 +7.2,0.2,0.61,16.2,0.043,14.0,103.0,0.9987,3.06,0.36,9.2 +7.0,0.53,0.02,1.0,0.036,39.0,107.0,0.993,3.2,0.32,9.0 +7.3,0.24,0.41,13.6,0.05,41.0,178.0,0.9988,3.37,0.43,9.7 +7.2,0.24,0.4,17.85,0.049,50.0,185.0,1.0,3.34,0.42,9.6 +7.6,0.15,0.4,1.3,0.036,24.0,112.0,0.9932,3.14,0.76,10.0 +7.7,0.3,0.42,14.3,0.045,45.0,213.0,0.9991,3.18,0.63,9.2 +7.6,0.33,0.41,13.7,0.045,44.0,197.0,0.9989,3.18,0.64,9.1 +6.8,0.24,0.31,18.3,0.046,40.0,142.0,1.0,3.3,0.41,8.7 +6.8,0.24,0.31,18.3,0.046,40.0,142.0,1.0,3.3,0.41,8.7 +6.8,0.35,0.44,6.5,0.056,31.0,161.0,0.9952,3.14,0.44,9.5 
+7.9,0.26,0.33,10.3,0.039,73.0,212.0,0.9969,2.93,0.49,9.5 +7.5,0.29,0.67,8.1,0.037,53.0,166.0,0.9966,2.9,0.41,8.9 +7.5,0.29,0.67,8.1,0.037,53.0,166.0,0.9966,2.9,0.41,8.9 +7.2,0.31,0.41,8.6,0.053,15.0,89.0,0.9976,3.29,0.64,9.9 +6.7,0.44,0.31,1.9,0.03,41.0,104.0,0.99,3.29,0.62,12.6 +10.0,0.23,0.27,14.1,0.033,45.0,166.0,0.9988,2.72,0.43,9.7 +7.4,0.21,0.3,7.9,0.039,14.0,118.0,0.9942,2.96,0.34,10.4 +8.8,0.23,0.35,10.7,0.04,26.0,183.0,0.9984,2.93,0.49,9.1 +7.8,0.34,0.27,1.2,0.04,25.0,106.0,0.9932,3.01,0.55,10.4 +7.9,0.26,0.33,10.3,0.039,73.0,212.0,0.9969,2.93,0.49,9.5 +7.5,0.29,0.67,8.1,0.037,53.0,166.0,0.9966,2.9,0.41,8.9 +6.0,0.28,0.35,1.9,0.037,16.0,120.0,0.9933,3.16,0.69,10.6 +7.9,0.37,0.3,2.7,0.029,64.0,158.0,0.9916,3.12,0.59,12.0 +7.2,0.36,0.36,5.7,0.038,26.0,98.0,0.9914,2.93,0.59,12.5 +7.6,0.13,0.34,9.3,0.062,40.0,126.0,0.9966,3.21,0.39,9.6 +6.6,0.25,0.36,8.1,0.045,54.0,180.0,0.9958,3.08,0.42,9.2 +7.1,0.18,0.26,1.3,0.041,20.0,71.0,0.9926,3.04,0.74,9.9 +7.9,0.3,0.27,8.5,0.036,20.0,112.0,0.9939,2.96,0.46,11.7 +8.3,0.23,0.3,2.1,0.049,21.0,153.0,0.9953,3.09,0.5,9.6 +6.8,0.43,0.3,3.5,0.033,27.0,135.0,0.9906,3.0,0.37,12.0 +7.2,0.36,0.36,5.7,0.038,26.0,98.0,0.9914,2.93,0.59,12.5 +6.6,0.25,0.36,8.1,0.045,54.0,180.0,0.9958,3.08,0.42,9.2 +7.1,0.18,0.26,1.3,0.041,20.0,71.0,0.9926,3.04,0.74,9.9 +6.6,0.35,0.29,14.4,0.044,54.0,177.0,0.9991,3.17,0.58,8.9 +7.3,0.22,0.5,13.7,0.049,56.0,189.0,0.9994,3.24,0.66,9.0 +8.1,0.26,0.33,11.1,0.052,52.5,158.0,0.9976,3.03,0.49,10.2 +7.6,0.13,0.34,9.3,0.062,40.0,126.0,0.9966,3.21,0.39,9.6 +7.0,0.12,0.19,4.9,0.055,27.0,127.0,0.9953,3.29,0.41,9.4 +8.2,0.37,0.27,1.7,0.028,10.0,59.0,0.9923,2.97,0.48,10.4 +7.6,0.26,0.36,1.6,0.032,6.0,106.0,0.993,3.15,0.4,10.4 +6.3,0.2,0.58,1.4,0.204,15.0,97.0,0.9931,3.16,0.43,10.0 +6.3,0.22,0.57,1.4,0.208,14.0,96.0,0.9932,3.16,0.43,10.0 +7.1,0.25,0.28,1.6,0.052,46.0,169.0,0.9926,3.05,0.41,10.5 +7.0,0.27,0.32,6.8,0.047,47.0,193.0,0.9938,3.23,0.39,11.4 +8.8,0.34,0.33,9.7,0.036,46.0,172.0,0.9966,3.08,0.4,10.2 +9.2,0.27,0.34,10.5,0.043,49.0,228.0,0.9974,3.04,0.41,10.4 +7.1,0.49,0.22,2.0,0.047,146.5,307.5,0.9924,3.24,0.37,11.0 +9.2,0.71,0.23,6.2,0.042,15.0,93.0,0.9948,2.89,0.34,10.1 +7.2,0.47,0.65,8.3,0.083,27.0,182.0,0.9964,3.0,0.35,9.2 +6.8,0.28,0.36,1.6,0.04,25.0,87.0,0.9924,3.23,0.66,10.3 +8.8,0.34,0.33,9.7,0.036,46.0,172.0,0.9966,3.08,0.4,10.2 +9.2,0.27,0.34,10.5,0.043,49.0,228.0,0.9974,3.04,0.41,10.4 +7.3,0.13,0.27,4.6,0.08,34.0,172.0,0.9938,3.23,0.39,11.1 +7.2,0.16,0.35,1.2,0.031,27.0,84.0,0.9928,3.33,0.34,9.9 +6.8,0.31,0.32,7.6,0.052,35.0,143.0,0.9959,3.14,0.38,9.0 +8.3,0.36,0.57,15.0,0.052,35.0,256.0,1.0001,2.93,0.64,8.6 +6.8,0.31,0.32,7.6,0.052,35.0,143.0,0.9959,3.14,0.38,9.0 +8.3,0.36,0.57,15.0,0.052,35.0,256.0,1.0001,2.93,0.64,8.6 +6.3,0.25,0.44,11.6,0.041,48.0,195.0,0.9968,3.18,0.52,9.5 +6.0,0.45,0.42,1.1,0.051,61.0,197.0,0.9932,3.02,0.4,9.0 +8.1,0.26,0.3,7.8,0.049,39.0,152.0,0.9954,2.99,0.58,10.0 +6.4,0.22,0.32,12.0,0.066,57.0,158.0,0.9992,3.6,0.43,9.0 +5.7,0.45,0.42,1.1,0.051,61.0,197.0,0.9932,3.02,0.4,9.0 +7.2,0.19,0.31,1.4,0.046,37.0,135.0,0.9939,3.34,0.57,10.2 +6.7,0.31,0.44,6.7,0.054,29.0,160.0,0.9952,3.04,0.44,9.6 +8.0,0.25,0.13,17.2,0.036,49.0,219.0,0.9996,2.96,0.46,9.7 +9.9,1.005,0.46,1.4,0.046,34.0,185.0,0.9966,3.02,0.49,10.2 +8.1,0.31,0.36,8.2,0.028,29.0,142.0,0.9925,3.01,0.34,13.0 +8.1,0.24,0.38,4.3,0.044,49.0,172.0,0.996,3.37,0.74,10.8 +8.0,0.25,0.13,17.2,0.036,49.0,219.0,0.9996,2.96,0.46,9.7 +6.4,0.29,0.28,11.1,0.063,66.0,169.0,0.9973,2.89,0.57,9.0 +7.2,0.15,0.33,1.1,0.027,16.0,63.0,0.9937,3.37,0.4,9.9 
+7.0,0.12,0.32,7.2,0.058,22.0,89.0,0.9966,3.29,0.38,9.2 +7.4,0.32,0.55,16.6,0.056,53.0,238.0,1.0017,2.96,0.58,8.7 +8.5,0.17,0.31,1.0,0.024,13.0,91.0,0.993,2.79,0.37,10.1 +8.5,0.17,0.31,1.0,0.024,13.0,91.0,0.993,2.79,0.37,10.1 +9.5,0.21,0.47,1.3,0.039,21.0,123.0,0.9959,2.9,0.64,9.5 +8.2,0.21,0.48,1.4,0.041,11.0,99.0,0.9958,3.17,0.57,9.9 +7.4,0.32,0.55,16.6,0.056,53.0,238.0,1.0017,2.96,0.58,8.7 +6.8,0.31,0.42,6.9,0.046,50.0,173.0,0.9958,3.19,0.46,9.0 +6.8,0.27,0.28,13.3,0.076,50.0,163.0,0.9979,3.03,0.38,8.6 +7.4,0.21,0.3,8.1,0.047,13.0,114.0,0.9941,3.12,0.35,10.5 +8.0,0.23,0.35,9.2,0.044,53.0,186.0,0.997,3.09,0.56,9.5 +7.6,0.2,0.31,1.4,0.047,41.0,142.0,0.9934,3.43,0.53,10.1 +6.3,0.41,0.3,3.2,0.03,49.0,164.0,0.9927,3.53,0.79,11.7 +8.3,0.49,0.43,2.5,0.036,32.0,116.0,0.9944,3.23,0.47,10.7 +6.3,0.41,0.3,3.2,0.03,49.0,164.0,0.9927,3.53,0.79,11.7 +7.6,0.2,0.26,4.5,0.086,37.0,133.0,0.9963,3.15,0.42,9.2 +7.5,0.26,0.26,18.35,0.084,33.0,139.0,1.0011,3.17,0.39,8.8 +7.5,0.26,0.26,18.35,0.084,33.0,139.0,1.0011,3.17,0.39,8.8 +6.8,0.27,0.35,7.8,0.048,76.0,197.0,0.9959,3.24,0.43,9.5 +6.8,0.28,0.37,7.0,0.057,35.0,208.0,0.9973,3.57,0.55,10.2 +8.4,0.2,0.27,6.3,0.048,30.0,143.0,0.9966,3.25,0.5,9.1 +7.9,0.33,0.26,1.2,0.044,23.0,103.0,0.9932,3.19,0.54,10.5 +7.5,0.38,0.5,12.8,0.042,57.0,184.0,0.9984,3.09,0.46,9.0 +7.6,0.2,0.3,14.2,0.056,53.0,212.5,0.999,3.14,0.46,8.9 +7.6,0.2,0.3,14.2,0.056,53.0,212.5,0.999,3.14,0.46,8.9 +7.6,0.2,0.3,14.2,0.056,53.0,212.5,0.999,3.14,0.46,8.9 +7.6,0.2,0.3,14.2,0.056,53.0,212.5,0.999,3.14,0.46,8.9 +7.6,0.2,0.3,14.2,0.056,53.0,212.5,0.999,3.14,0.46,8.9 +8.1,0.19,0.58,16.65,0.049,48.0,181.0,1.0006,3.2,0.62,9.1 +7.6,0.16,0.41,1.9,0.047,27.0,151.0,0.9937,3.2,0.53,10.1 +8.1,0.22,0.28,7.7,0.043,57.0,176.0,0.9954,3.12,0.55,10.0 +8.0,0.22,0.32,10.4,0.043,63.0,201.0,0.997,3.11,0.53,9.5 +7.1,0.33,0.3,3.3,0.034,30.0,102.0,0.9912,3.08,0.31,12.3 +6.4,0.43,0.27,1.1,0.054,5.0,110.0,0.9939,3.24,0.52,9.1 +7.6,0.2,0.3,14.2,0.056,53.0,212.5,0.999,3.14,0.46,8.9 +7.0,0.12,0.28,6.3,0.057,17.0,103.0,0.9957,3.5,0.44,9.6 +7.4,0.3,0.22,5.25,0.053,33.0,180.0,0.9926,3.13,0.45,11.6 +7.0,0.28,0.33,14.6,0.043,47.0,168.0,0.9994,3.34,0.67,8.8 +8.4,0.2,0.38,11.8,0.055,51.0,170.0,1.0004,3.34,0.82,8.9 +7.0,0.28,0.33,14.6,0.043,47.0,168.0,0.9994,3.34,0.67,8.8 +8.4,0.2,0.38,11.8,0.055,51.0,170.0,1.0004,3.34,0.82,8.9 +8.4,0.2,0.38,11.8,0.055,51.0,170.0,1.0004,3.34,0.82,8.9 +7.3,0.18,0.31,17.3,0.055,32.0,197.0,1.0002,3.13,0.46,9.0 +6.8,0.31,0.09,1.4,0.04,56.0,145.0,0.9922,3.19,0.46,10.0 +6.7,0.31,0.08,1.3,0.038,58.0,147.0,0.9922,3.18,0.46,10.0 +7.6,0.17,0.35,1.6,0.047,43.0,154.0,0.9934,3.36,0.69,11.1 +7.4,0.3,0.22,5.25,0.053,33.0,180.0,0.9926,3.13,0.45,11.6 +7.4,0.26,0.31,2.4,0.043,58.0,178.0,0.9941,3.42,0.68,10.6 +7.0,0.28,0.33,14.6,0.043,47.0,168.0,0.9994,3.34,0.67,8.8 +8.4,0.2,0.38,11.8,0.055,51.0,170.0,1.0004,3.34,0.82,8.9 +5.6,0.18,0.31,1.5,0.038,16.0,84.0,0.9924,3.34,0.58,10.1 +7.2,0.15,0.39,1.8,0.043,21.0,159.0,0.9948,3.52,0.47,10.0 +8.0,0.4,0.33,7.7,0.034,27.0,98.0,0.9935,3.18,0.41,12.2 +7.0,0.25,0.56,2.0,0.035,20.0,95.0,0.9918,3.23,0.53,11.0 +7.2,0.15,0.39,1.8,0.043,21.0,159.0,0.9948,3.52,0.47,10.0 +6.8,0.18,0.46,1.4,0.064,37.0,160.0,0.9924,3.37,0.45,11.1 +6.6,0.32,0.22,16.7,0.046,38.0,133.0,0.9979,3.22,0.67,10.4 +9.0,0.55,0.3,8.1,0.026,14.0,71.0,0.993,2.94,0.36,11.8 +6.9,0.19,0.39,8.0,0.028,22.0,84.0,0.994,3.11,0.66,10.8 +6.3,0.41,0.33,4.7,0.023,28.0,110.0,0.991,3.3,0.38,12.5 +9.0,0.55,0.3,8.1,0.026,14.0,71.0,0.993,2.94,0.36,11.8 +7.0,0.2,0.34,2.1,0.049,12.0,136.0,0.9922,3.25,0.46,11.6 
+6.6,0.32,0.22,16.7,0.046,38.0,133.0,0.9979,3.22,0.67,10.4 +7.7,0.26,0.34,6.4,0.05,36.0,163.0,0.9937,3.19,0.7,11.5 +6.3,0.21,0.28,1.5,0.051,46.0,142.0,0.9928,3.23,0.42,10.1 +7.6,0.34,0.39,7.6,0.04,45.0,215.0,0.9965,3.11,0.53,9.2 +6.3,0.21,0.28,1.5,0.051,46.0,142.0,0.9928,3.23,0.42,10.1 +8.0,0.43,0.4,12.4,0.168,29.0,190.0,0.9991,3.07,0.64,9.2 +7.5,0.3,0.71,1.3,0.16,44.0,149.0,0.9948,3.08,0.42,8.9 +6.4,0.26,0.4,1.7,0.179,5.0,60.0,0.9925,3.09,0.54,10.1 +6.9,0.32,0.15,8.1,0.046,51.0,180.0,0.9958,3.13,0.45,8.9 +8.9,0.21,0.34,7.1,0.037,33.0,150.0,0.9962,3.1,0.45,9.7 +7.6,0.34,0.39,7.6,0.04,45.0,215.0,0.9965,3.11,0.53,9.2 +9.5,0.42,0.41,2.3,0.034,22.0,145.0,0.9951,3.06,0.52,11.0 +7.6,0.29,0.26,6.5,0.042,32.0,160.0,0.9944,3.14,0.47,10.7 +6.5,0.25,0.2,1.4,0.024,29.0,101.0,0.9916,3.24,0.54,10.8 +7.2,0.23,0.33,12.7,0.049,50.0,183.0,0.9987,3.41,0.4,9.8 +7.9,0.35,0.36,1.6,0.038,11.0,124.0,0.9928,3.25,0.48,11.0 +8.8,0.2,0.28,1.1,0.018,18.0,72.0,0.9926,2.97,0.35,10.4 +5.7,0.27,0.32,1.2,0.046,20.0,155.0,0.9934,3.8,0.41,10.2 +7.6,0.29,0.26,6.5,0.042,32.0,160.0,0.9944,3.14,0.47,10.7 +5.5,0.14,0.27,4.6,0.029,22.0,104.0,0.9949,3.34,0.44,9.0 +8.7,0.24,0.35,0.6,0.042,11.0,71.0,0.9926,3.08,0.38,10.6 +6.7,0.3,0.45,10.6,0.032,56.0,212.0,0.997,3.22,0.59,9.5 +5.5,0.14,0.27,4.6,0.029,22.0,104.0,0.9949,3.34,0.44,9.0 +5.6,0.13,0.27,4.8,0.028,22.0,104.0,0.9948,3.34,0.45,9.2 +7.4,0.18,0.34,2.7,0.03,30.0,107.0,0.992,2.97,0.53,11.0 +5.7,0.385,0.04,12.6,0.034,22.0,115.0,0.9964,3.28,0.63,9.9 +8.7,0.24,0.35,0.6,0.042,11.0,71.0,0.9926,3.08,0.38,10.6 +8.3,0.33,0.43,9.2,0.046,22.0,126.0,0.9982,3.38,0.47,9.3 +6.8,0.34,0.44,6.6,0.052,28.0,156.0,0.9955,3.14,0.41,9.6 +6.8,0.33,0.44,7.0,0.05,29.0,155.0,0.9955,3.14,0.42,9.5 +6.3,0.28,0.24,8.45,0.031,32.0,172.0,0.9958,3.39,0.57,9.7 +11.8,0.23,0.38,11.1,0.034,15.0,123.0,0.9997,2.93,0.55,9.7 +6.8,0.21,0.27,18.15,0.042,41.0,146.0,1.0001,3.3,0.36,8.7 +6.8,0.21,0.27,18.15,0.042,41.0,146.0,1.0001,3.3,0.36,8.7 +8.6,0.485,0.29,4.1,0.026,19.0,101.0,0.9918,3.01,0.38,12.4 +8.6,0.485,0.29,4.1,0.026,19.0,101.0,0.9918,3.01,0.38,12.4 +7.3,0.29,0.29,4.6,0.029,27.0,155.0,0.9931,3.07,0.26,10.6 +6.8,0.21,0.27,18.15,0.042,41.0,146.0,1.0001,3.3,0.36,8.7 +6.7,0.31,0.31,4.9,0.031,20.0,151.0,0.9926,3.36,0.82,12.0 +7.3,0.29,0.37,8.3,0.044,45.0,227.0,0.9966,3.12,0.47,9.0 +5.7,0.46,0.46,1.4,0.04,31.0,169.0,0.9932,3.13,0.47,8.8 +6.8,0.28,0.44,11.5,0.04,58.0,223.0,0.9969,3.22,0.56,9.5 +6.7,0.23,0.33,1.8,0.036,23.0,96.0,0.9925,3.32,0.4,10.8 +6.9,0.17,0.25,1.6,0.047,34.0,132.0,0.9914,3.16,0.48,11.4 +7.6,0.18,0.36,2.4,0.049,38.0,123.0,0.996,3.6,0.46,10.3 +6.6,0.22,0.28,4.9,0.042,51.0,180.0,0.9952,3.3,0.75,9.5 +7.8,0.27,0.28,1.8,0.05,21.0,127.0,0.9934,3.15,0.44,9.9 +7.7,0.28,0.29,4.3,0.051,25.0,142.0,0.9939,3.16,0.39,10.2 +7.6,0.29,0.29,4.4,0.051,26.0,146.0,0.9939,3.16,0.39,10.2 +5.7,0.32,0.18,1.4,0.029,26.0,104.0,0.9906,3.44,0.37,11.0 +7.1,0.33,0.25,1.6,0.03,25.0,126.0,0.9901,3.22,0.34,12.1 +7.3,0.34,0.3,1.3,0.057,25.0,173.0,0.9948,3.26,0.51,9.1 +6.5,0.19,0.26,5.2,0.04,31.0,140.0,0.995,3.26,0.68,9.5 +6.6,0.23,0.27,5.6,0.043,43.0,164.0,0.9953,3.27,0.76,9.5 +6.6,0.27,0.29,5.3,0.045,57.0,189.0,0.9953,3.31,0.79,9.8 +6.6,0.22,0.28,4.9,0.042,51.0,180.0,0.9952,3.3,0.75,9.5 +7.6,0.18,0.36,2.4,0.049,38.0,123.0,0.996,3.6,0.46,10.3 +6.8,0.36,0.32,1.6,0.039,10.0,124.0,0.9948,3.3,0.67,9.6 +7.0,0.22,0.39,2.1,0.055,39.0,198.0,0.9951,3.52,0.54,10.2 +5.9,0.17,0.3,1.4,0.042,25.0,119.0,0.9931,3.68,0.72,10.5 +7.4,0.45,0.32,7.1,0.044,17.0,117.0,0.9962,3.32,0.41,10.4 +6.8,0.36,0.32,1.6,0.039,10.0,124.0,0.9948,3.3,0.67,9.6 
+7.5,0.42,0.14,10.7,0.046,18.0,95.0,0.9959,3.22,0.33,10.7 +7.5,0.33,0.32,11.1,0.036,25.0,119.0,0.9962,3.15,0.34,10.5 +9.4,0.3,0.32,10.7,0.029,14.0,111.0,0.9958,2.85,0.42,10.6 +7.9,0.17,0.32,1.6,0.053,47.0,150.0,0.9948,3.29,0.76,9.6 +7.9,0.17,0.32,1.6,0.053,47.0,150.0,0.9948,3.29,0.76,9.6 +8.2,0.17,0.32,1.5,0.05,17.0,101.0,0.994,3.14,0.58,9.5 +8.3,0.17,0.31,1.5,0.049,48.0,153.0,0.9942,3.12,0.58,9.4 +8.7,0.15,0.3,1.6,0.046,29.0,130.0,0.9942,3.22,0.38,9.8 +7.9,0.17,0.32,1.6,0.053,47.0,150.0,0.9948,3.29,0.76,9.6 +7.2,0.25,0.19,8.0,0.044,51.0,172.0,0.9964,3.16,0.44,9.2 +7.2,0.24,0.19,7.7,0.045,53.0,176.0,0.9958,3.17,0.38,9.5 +5.3,0.76,0.03,2.7,0.043,27.0,93.0,0.9932,3.34,0.38,9.2 +6.6,0.22,0.53,15.1,0.052,22.0,136.0,0.9986,2.94,0.35,9.4 +6.6,0.22,0.53,15.1,0.052,22.0,136.0,0.9986,2.94,0.35,9.4 +8.4,0.28,0.4,8.9,0.048,33.0,146.0,0.9988,3.4,0.46,9.3 +6.8,0.32,0.34,6.0,0.05,5.0,129.0,0.9953,3.19,0.4,9.1 +6.7,0.24,0.33,12.3,0.046,31.0,145.0,0.9983,3.36,0.4,9.5 +7.4,0.18,0.36,13.1,0.056,72.0,163.0,1.0,3.42,0.35,9.1 +6.0,0.16,0.3,6.7,0.043,43.0,153.0,0.9951,3.63,0.46,10.6 +6.7,0.24,0.33,12.3,0.046,31.0,145.0,0.9983,3.36,0.4,9.5 +6.8,0.28,0.35,2.3,0.042,16.0,85.0,0.9906,3.19,0.56,12.4 +6.2,0.34,0.3,11.1,0.047,28.0,237.0,0.9981,3.18,0.49,8.7 +6.0,0.27,0.15,1.5,0.056,35.0,128.0,0.9936,3.12,0.45,8.8 +6.0,0.16,0.3,6.7,0.043,43.0,153.0,0.9951,3.63,0.46,10.6 +6.8,0.32,0.34,6.0,0.05,5.0,129.0,0.9953,3.19,0.4,9.1 +8.5,0.24,0.47,15.2,0.057,40.0,234.0,1.0005,3.02,0.66,9.0 +8.1,0.24,0.33,10.2,0.048,46.0,141.0,0.9972,3.16,0.48,10.3 +7.4,0.18,0.36,13.1,0.056,72.0,163.0,1.0,3.42,0.35,9.1 +7.7,0.23,0.31,10.7,0.038,59.0,186.0,0.9969,3.12,0.55,9.5 +6.5,0.22,0.25,17.1,0.05,44.0,138.0,1.0001,3.3,0.37,8.8 +6.5,0.22,0.25,17.1,0.05,44.0,138.0,1.0001,3.3,0.37,8.8 +6.5,0.22,0.25,17.1,0.05,44.0,138.0,1.0001,3.3,0.37,8.8 +5.7,0.33,0.15,1.9,0.05,20.0,93.0,0.9934,3.38,0.62,9.9 +7.7,0.23,0.31,10.7,0.038,59.0,186.0,0.9969,3.12,0.55,9.5 +6.5,0.22,0.25,17.1,0.05,44.0,138.0,1.0001,3.3,0.37,8.8 +6.8,0.2,0.27,1.2,0.034,19.0,68.0,0.9902,3.14,0.37,11.7 +7.7,0.26,0.32,1.2,0.04,26.0,117.0,0.993,3.21,0.56,10.8 +6.4,0.2,0.32,3.1,0.041,18.0,126.0,0.9914,3.43,0.42,12.0 +8.0,0.16,0.36,1.5,0.033,14.0,122.0,0.9941,3.2,0.39,10.3 +6.8,0.25,0.27,10.7,0.076,47.0,154.0,0.9967,3.05,0.38,9.0 +7.7,0.39,0.28,4.9,0.035,36.0,109.0,0.9918,3.19,0.58,12.2 +6.9,0.26,0.33,12.6,0.051,59.0,173.0,0.998,3.39,0.38,9.9 +6.8,0.25,0.27,10.7,0.076,47.0,154.0,0.9967,3.05,0.38,9.0 +7.7,0.39,0.28,4.9,0.035,36.0,109.0,0.9918,3.19,0.58,12.2 +6.0,0.28,0.22,12.15,0.048,42.0,163.0,0.9957,3.2,0.46,10.1 +6.5,0.43,0.28,12.0,0.056,23.0,174.0,0.9986,3.31,0.55,9.3 +9.1,0.33,0.38,1.7,0.062,50.5,344.0,0.9958,3.1,0.7,9.5 +5.9,0.5,0.05,2.6,0.054,36.0,146.0,0.9948,3.43,0.5,9.2 +6.8,0.28,0.39,1.4,0.036,15.0,115.0,0.9918,3.27,0.72,11.7 +7.0,0.35,0.24,1.9,0.04,21.0,144.0,0.9923,3.35,0.38,11.0 +7.1,0.22,0.32,16.9,0.056,49.0,158.0,0.9998,3.37,0.38,9.6 +7.1,0.22,0.32,16.9,0.056,49.0,158.0,0.9998,3.37,0.38,9.6 +8.3,0.24,0.27,2.1,0.03,22.0,162.0,0.9914,2.99,0.68,11.9 +6.8,0.26,0.32,7.0,0.041,38.0,118.0,0.9939,3.25,0.52,10.8 +7.2,0.16,0.26,7.1,0.054,41.0,224.0,0.9966,3.38,0.55,10.1 +7.9,0.18,0.36,5.9,0.058,31.0,132.0,0.995,3.25,0.52,10.9 +7.2,0.16,0.26,7.1,0.054,41.0,224.0,0.9966,3.38,0.55,10.1 +5.5,0.24,0.32,8.7,0.06,19.0,102.0,0.994,3.27,0.31,10.4 +7.1,0.33,0.64,13.2,0.056,12.0,105.0,0.9972,3.05,0.39,9.2 +7.7,0.28,0.35,15.3,0.056,31.0,117.0,0.9998,3.27,0.5,9.6 +7.7,0.28,0.35,15.3,0.056,31.0,117.0,0.9998,3.27,0.5,9.6 +7.5,0.26,0.52,13.2,0.047,64.0,179.0,0.9982,3.1,0.46,9.0 
+6.5,0.14,0.32,2.7,0.037,18.0,89.0,0.9924,3.4,0.74,11.5 +8.2,0.21,0.32,10.65,0.053,53.0,145.0,0.9972,3.17,0.48,10.2 +7.2,0.2,0.31,10.0,0.054,49.0,165.0,0.997,3.4,0.42,9.9 +7.2,0.115,0.3,6.8,0.056,26.0,105.0,0.9954,3.44,0.4,9.6 +6.4,0.29,0.2,15.6,0.04,20.0,142.0,0.9962,3.1,0.54,10.6 +7.1,0.33,0.64,13.2,0.056,12.0,105.0,0.9972,3.05,0.39,9.2 +6.8,0.24,0.34,5.1,0.038,31.0,99.0,0.9921,3.24,0.46,11.8 +7.0,0.24,0.34,3.0,0.035,36.0,102.0,0.9905,3.18,0.43,12.2 +7.7,0.28,0.35,15.3,0.056,31.0,117.0,0.9998,3.27,0.5,9.6 +7.0,0.22,0.33,2.1,0.052,15.0,76.0,0.993,3.2,0.41,10.6 +7.5,0.18,0.39,1.9,0.054,23.0,91.0,0.9941,3.27,0.45,10.3 +9.8,0.93,0.45,8.6,0.052,34.0,187.0,0.9994,3.12,0.59,10.2 +7.8,0.29,0.33,8.75,0.035,33.0,181.0,0.9962,3.11,0.46,10.7 +7.9,0.28,0.32,3.6,0.038,9.0,76.0,0.992,3.05,0.31,11.7 +8.5,0.25,0.27,4.7,0.031,31.0,92.0,0.9922,3.01,0.33,12.0 +7.4,0.18,0.27,1.3,0.048,26.0,105.0,0.994,3.52,0.66,10.6 +6.3,0.24,0.37,1.8,0.031,6.0,61.0,0.9897,3.3,0.34,12.2 +6.0,0.33,0.38,9.7,0.04,29.0,124.0,0.9954,3.47,0.48,11.0 +6.8,0.37,0.28,4.0,0.03,29.0,79.0,0.99,3.23,0.46,12.4 +9.9,0.49,0.23,2.4,0.087,19.0,115.0,0.9948,2.77,0.44,9.4 +8.5,0.25,0.27,4.7,0.031,31.0,92.0,0.9922,3.01,0.33,12.0 +8.4,0.22,0.28,18.8,0.028,55.0,130.0,0.998,2.96,0.35,11.6 +7.0,0.35,0.31,1.8,0.069,15.0,162.0,0.9944,3.18,0.47,9.4 +7.0,0.35,0.31,1.8,0.069,15.0,162.0,0.9944,3.18,0.47,9.4 +7.4,0.19,0.3,12.8,0.053,48.5,229.0,0.9986,3.14,0.49,9.1 +7.4,0.19,0.3,12.8,0.053,48.5,229.0,0.9986,3.14,0.49,9.1 +7.4,0.19,0.3,12.8,0.053,48.5,229.0,0.9986,3.14,0.49,9.1 +7.4,0.19,0.3,12.8,0.053,48.5,229.0,0.9986,3.14,0.49,9.1 +7.4,0.19,0.3,12.8,0.053,48.5,229.0,0.9986,3.14,0.49,9.1 +6.9,0.32,0.13,7.8,0.042,11.0,117.0,0.996,3.23,0.37,9.2 +7.6,0.32,0.58,16.75,0.05,43.0,163.0,0.9999,3.15,0.54,9.2 +7.4,0.19,0.3,12.8,0.053,48.5,229.0,0.9986,3.14,0.49,9.1 +7.4,0.19,0.3,12.8,0.053,48.5,212.0,0.9986,3.14,0.49,9.1 +6.9,0.32,0.13,7.8,0.042,11.0,117.0,0.996,3.23,0.37,9.2 +6.0,0.34,0.24,5.4,0.06,23.0,126.0,0.9951,3.25,0.44,9.0 +7.6,0.32,0.58,16.75,0.05,43.0,163.0,0.9999,3.15,0.54,9.2 +7.7,0.24,0.31,1.3,0.047,33.0,106.0,0.993,3.22,0.55,10.8 +8.0,0.36,0.43,10.1,0.053,29.0,146.0,0.9982,3.4,0.46,9.5 +7.4,0.29,0.25,3.8,0.044,30.0,114.0,0.992,3.11,0.4,11.0 +6.6,0.32,0.27,10.9,0.041,37.0,146.0,0.9963,3.24,0.47,10.0 +6.3,0.3,0.24,6.6,0.04,38.0,141.0,0.995,3.22,0.47,9.5 +6.4,0.33,0.24,9.8,0.041,29.0,109.0,0.9956,3.29,0.47,10.1 +7.5,0.18,0.31,11.7,0.051,24.0,94.0,0.997,3.19,0.44,9.5 +6.5,0.39,0.81,1.2,0.217,14.0,74.0,0.9936,3.08,0.53,9.5 +6.8,0.25,0.18,1.4,0.056,13.0,137.0,0.9935,3.11,0.42,9.5 +6.4,0.18,0.32,9.6,0.052,24.0,90.0,0.9963,3.35,0.49,9.4 +7.1,0.18,0.32,12.2,0.048,36.0,125.0,0.9967,2.92,0.54,9.4 +7.6,0.27,0.42,2.6,0.044,29.0,110.0,0.9912,3.31,0.51,12.7 +9.2,0.23,0.35,10.7,0.037,34.0,145.0,0.9981,3.09,0.32,9.7 +7.9,0.28,0.41,4.9,0.058,31.0,153.0,0.9966,3.27,0.51,9.7 +7.1,0.18,0.32,12.2,0.048,36.0,125.0,0.9967,2.92,0.54,9.4 +6.4,0.18,0.32,9.6,0.052,24.0,90.0,0.9963,3.35,0.49,9.4 +6.8,0.25,0.18,1.4,0.056,13.0,137.0,0.9935,3.11,0.42,9.5 +7.0,0.22,0.26,1.1,0.037,20.0,71.0,0.9902,3.1,0.38,11.7 +7.3,0.18,0.29,1.0,0.036,26.0,101.0,0.99,3.09,0.37,11.7 +7.1,0.26,0.19,8.2,0.051,53.0,187.0,0.996,3.16,0.52,9.7 +6.6,0.25,0.42,11.3,0.049,77.0,231.0,0.9966,3.24,0.52,9.5 +6.4,0.24,0.23,7.3,0.069,31.0,157.0,0.9962,3.25,0.53,9.1 +6.0,0.28,0.27,2.3,0.051,23.0,147.0,0.994,3.23,0.67,10.3 +7.1,0.26,0.19,8.2,0.051,53.0,187.0,0.996,3.16,0.52,9.7 +7.8,0.24,0.38,2.1,0.058,14.0,167.0,0.994,3.21,0.55,9.9 +7.6,0.27,0.33,2.0,0.059,19.0,175.0,0.9944,3.22,0.56,9.9 
+7.7,0.39,0.34,10.0,0.056,35.0,178.0,0.9974,3.26,0.6,10.2 +8.9,0.24,0.33,15.75,0.035,16.0,132.0,0.996,3.0,0.37,12.1 +6.6,0.23,0.24,3.9,0.045,36.0,138.0,0.9922,3.15,0.64,11.3 +7.1,0.26,0.3,2.0,0.031,13.0,128.0,0.9917,3.19,0.49,11.4 +7.0,0.32,0.35,1.5,0.039,24.0,125.0,0.9918,3.17,0.64,12.2 +7.4,0.24,0.26,1.6,0.058,53.0,150.0,0.9936,3.18,0.5,9.9 +6.9,0.21,0.33,1.4,0.056,35.0,136.0,0.9938,3.63,0.78,10.3 +7.0,0.32,0.35,1.5,0.039,24.0,125.0,0.9918,3.17,0.64,12.2 +7.4,0.17,0.29,1.4,0.047,23.0,107.0,0.9939,3.52,0.65,10.4 +7.1,0.26,0.3,2.0,0.031,13.0,128.0,0.9917,3.19,0.49,11.4 +8.5,0.28,0.34,13.8,0.041,32.0,161.0,0.9981,3.13,0.4,9.9 +7.8,0.3,0.37,1.3,0.051,16.0,96.0,0.9941,3.32,0.62,10.0 +8.1,0.25,0.38,3.8,0.051,18.0,129.0,0.9928,3.21,0.38,11.5 +7.7,0.28,0.29,6.9,0.041,29.0,163.0,0.9952,3.44,0.6,10.5 +6.5,0.24,0.36,2.2,0.027,36.0,134.0,0.9898,3.28,0.36,12.5 +7.0,0.22,0.32,1.6,0.045,40.0,120.0,0.9914,2.98,0.44,10.5 +8.5,0.28,0.34,13.8,0.041,32.0,161.0,0.9981,3.13,0.4,9.9 +8.0,0.45,0.28,10.8,0.051,25.0,157.0,0.9957,3.06,0.47,11.4 +6.9,0.23,0.33,12.8,0.056,44.0,169.0,0.998,3.42,0.42,9.8 +8.0,0.45,0.28,10.8,0.051,25.0,157.0,0.9957,3.06,0.47,11.4 +7.6,0.23,0.26,15.3,0.067,32.0,166.0,0.9986,3.03,0.44,9.2 +7.7,0.28,0.58,12.1,0.046,60.0,177.0,0.9983,3.08,0.46,8.9 +7.7,0.27,0.61,12.0,0.046,64.0,179.0,0.9982,3.07,0.46,8.9 +7.1,0.2,0.36,11.6,0.042,45.0,124.0,0.997,2.92,0.59,9.5 +6.9,0.25,0.35,9.2,0.034,42.0,150.0,0.9947,3.21,0.36,11.5 +7.1,0.2,0.36,11.6,0.042,45.0,124.0,0.997,2.92,0.59,9.5 +6.9,0.25,0.35,9.2,0.034,42.0,150.0,0.9947,3.21,0.36,11.5 +8.4,0.2,0.31,2.8,0.054,16.0,89.0,0.99416,2.96,0.45,9.5 +6.5,0.39,0.35,1.6,0.049,10.0,164.0,0.99516,3.35,0.51,9.7 +7.2,0.23,0.38,6.1,0.067,20.0,90.0,0.99496,3.17,0.79,9.7 +6.9,0.44,0.42,8.5,0.048,10.0,147.0,0.9974,3.32,0.46,9.5 +7.1,0.28,0.19,7.8,0.04,48.0,184.0,0.99579,3.16,0.5,9.4 +6.4,0.34,0.2,14.9,0.06,37.0,162.0,0.9983,3.13,0.45,9.0 +6.1,0.15,0.29,6.2,0.046,39.0,151.0,0.99471,3.6,0.44,10.6 +6.9,0.44,0.42,8.5,0.048,10.0,147.0,0.9974,3.32,0.46,9.5 +7.2,0.29,0.18,8.2,0.042,41.0,180.0,0.99644,3.16,0.49,9.1 +7.1,0.28,0.19,7.8,0.04,48.0,184.0,0.99579,3.16,0.5,9.4 +6.1,0.23,0.45,10.6,0.094,49.0,169.0,0.99699,3.05,0.54,8.8 +6.7,0.23,0.42,11.2,0.047,52.0,171.0,0.99758,3.54,0.74,10.4 +7.0,0.36,0.14,11.6,0.043,35.0,228.0,0.9977,3.13,0.51,8.9 +7.5,0.31,0.24,7.1,0.031,28.0,141.0,0.99397,3.16,0.38,10.6 +6.4,0.34,0.2,14.9,0.06,37.0,162.0,0.9983,3.13,0.45,9.0 +6.1,0.15,0.29,6.2,0.046,39.0,151.0,0.99471,3.6,0.44,10.6 +7.4,0.2,0.29,1.7,0.047,16.0,100.0,0.99243,3.28,0.45,10.6 +6.3,0.27,0.18,7.7,0.048,45.0,186.0,0.9962,3.23,0.47,9.0 +9.2,0.34,0.54,17.3,0.06,46.0,235.0,1.00182,3.08,0.61,8.8 +7.4,0.18,0.29,1.4,0.042,34.0,101.0,0.99384,3.54,0.6,10.5 +7.2,0.29,0.2,7.7,0.046,51.0,174.0,0.99582,3.16,0.52,9.5 +6.3,0.27,0.18,7.7,0.048,45.0,186.0,0.9962,3.23,0.47,9.0 +6.2,0.26,0.19,3.4,0.049,47.0,172.0,0.9924,3.14,0.43,10.4 +7.3,0.21,0.21,1.6,0.046,35.0,133.0,0.99466,3.38,0.46,10.0 +7.1,0.14,0.35,1.4,0.039,24.0,128.0,0.99212,2.97,0.68,10.4 +7.2,0.39,0.54,1.4,0.157,34.0,132.0,0.99449,3.11,0.53,9.0 +7.6,0.48,0.28,10.4,0.049,57.0,205.0,0.99748,3.24,0.45,9.3 +7.2,0.39,0.54,1.4,0.157,34.0,132.0,0.99449,3.11,0.53,9.0 +7.6,0.48,0.28,10.4,0.049,57.0,205.0,0.99748,3.24,0.45,9.3 +6.5,0.36,0.31,4.1,0.061,20.0,134.0,0.99475,3.18,0.45,9.0 +8.5,0.25,0.31,2.8,0.032,11.0,61.0,0.99189,3.06,0.44,11.5 +6.9,0.3,0.21,15.7,0.056,49.0,159.0,0.99827,3.11,0.48,9.0 +6.6,0.19,0.43,10.9,0.045,53.0,154.0,0.99752,3.52,0.77,10.4 +6.9,0.3,0.21,15.7,0.056,49.0,159.0,0.99827,3.11,0.48,9.0 
+9.4,0.42,0.32,6.5,0.027,20.0,167.0,0.99479,3.08,0.43,10.6 +6.6,0.19,0.43,10.9,0.045,53.0,154.0,0.99752,3.52,0.77,10.4 +6.3,0.2,0.3,5.9,0.034,35.0,152.0,0.99642,3.47,0.4,8.5 +8.5,0.19,0.56,17.3,0.055,47.0,169.0,1.00047,3.07,0.67,9.3 +7.3,0.19,0.25,1.4,0.051,41.0,107.0,0.99382,3.53,0.66,10.5 +6.7,0.25,0.26,13.5,0.06,50.0,156.0,0.99784,3.39,0.46,9.9 +6.2,0.25,0.28,8.5,0.035,28.0,108.0,0.99486,3.4,0.42,10.4 +6.1,0.46,0.32,6.2,0.053,10.0,94.0,0.99537,3.35,0.47,10.1 +7.3,0.19,0.25,1.4,0.051,41.0,107.0,0.99382,3.53,0.66,10.5 +7.5,0.29,0.26,14.95,0.067,47.0,178.0,0.99838,3.04,0.49,9.2 +6.7,0.31,0.18,7.7,0.043,57.0,200.0,0.99566,3.17,0.44,9.4 +7.4,0.14,0.3,1.3,0.033,25.0,91.0,0.99268,3.53,0.39,10.6 +6.7,0.31,0.18,7.7,0.043,57.0,200.0,0.99566,3.17,0.44,9.4 +7.1,0.4,0.52,1.3,0.148,45.0,149.0,0.99468,3.08,0.56,8.7 +6.4,0.16,0.25,1.3,0.047,20.0,77.0,0.9933,3.61,0.54,10.2 +6.3,0.16,0.22,1.3,0.046,18.0,66.0,0.99307,3.61,0.55,10.3 +7.4,0.33,0.26,15.6,0.049,67.0,210.0,0.99907,3.06,0.68,9.5 +7.4,0.33,0.26,15.6,0.049,67.0,210.0,0.99907,3.06,0.68,9.5 +7.4,0.33,0.26,15.6,0.049,67.0,210.0,0.99907,3.06,0.68,9.5 +7.4,0.33,0.26,15.6,0.049,67.0,210.0,0.99907,3.06,0.68,9.5 +6.6,0.41,0.24,4.9,0.158,47.0,144.0,0.99471,3.17,0.49,9.4 +6.7,0.43,0.23,5.0,0.157,49.0,145.0,0.99471,3.17,0.49,9.4 +7.4,0.33,0.26,15.6,0.049,67.0,210.0,0.99907,3.06,0.68,9.5 +7.3,0.4,0.28,6.5,0.037,26.0,97.0,0.99148,3.16,0.58,12.6 +7.4,0.18,0.24,1.4,0.047,21.0,106.0,0.99383,3.52,0.64,10.5 +8.6,0.17,0.28,2.7,0.047,38.0,150.0,0.99365,3.1,0.56,10.8 +6.5,0.32,0.23,1.2,0.054,39.0,208.0,0.99272,3.18,0.46,9.9 +7.3,0.4,0.28,6.5,0.037,26.0,97.0,0.99148,3.16,0.58,12.6 +7.0,0.32,0.31,6.4,0.031,38.0,115.0,0.99235,3.38,0.58,12.2 +7.5,0.42,0.19,6.9,0.041,62.0,150.0,0.99508,3.23,0.37,10.0 +6.9,0.28,0.31,7.2,0.04,47.0,168.0,0.9946,3.29,0.57,10.6 +6.5,0.29,0.42,10.6,0.042,66.0,202.0,0.99674,3.24,0.53,9.5 +6.3,0.41,0.18,3.5,0.027,23.0,109.0,0.99018,3.34,0.54,12.8 +7.0,0.32,0.31,6.4,0.031,38.0,115.0,0.99235,3.38,0.58,12.2 +7.3,0.3,0.33,2.3,0.043,28.0,125.0,0.99084,3.34,0.44,12.6 +6.6,0.22,0.28,12.05,0.058,25.0,125.0,0.99856,3.45,0.45,9.4 +6.0,0.26,0.18,7.0,0.055,50.0,194.0,0.99591,3.21,0.43,9.0 +6.9,0.44,0.18,11.8,0.051,26.0,126.0,0.9975,3.23,0.48,9.1 +7.5,0.42,0.2,1.4,0.06,15.0,168.0,0.9944,3.06,0.4,9.4 +7.0,0.36,0.3,5.0,0.04,40.0,143.0,0.99173,3.33,0.42,12.2 +5.6,0.295,0.2,2.2,0.049,18.0,134.0,0.99378,3.21,0.68,10.0 +6.8,0.21,0.55,14.6,0.053,34.0,159.0,0.99805,2.93,0.44,9.2 +9.4,0.28,0.3,1.6,0.045,36.0,139.0,0.99534,3.11,0.49,9.3 +8.1,0.28,0.34,1.3,0.035,11.0,126.0,0.99232,3.14,0.5,9.8 +6.8,0.21,0.55,14.6,0.053,34.0,159.0,0.99805,2.93,0.44,9.2 +7.0,0.22,0.26,2.8,0.036,44.0,132.0,0.99078,3.34,0.41,12.0 +9.4,0.28,0.3,1.6,0.045,36.0,139.0,0.99534,3.11,0.49,9.3 +6.8,0.32,0.3,3.3,0.029,15.0,80.0,0.99061,3.33,0.63,12.6 +7.0,0.19,0.33,6.3,0.032,42.0,127.0,0.99182,3.31,0.38,12.2 +7.7,0.42,0.38,8.1,0.061,49.0,144.0,0.9966,3.4,0.58,11.0 +7.4,0.2,0.31,1.6,0.038,34.0,116.0,0.9912,3.25,0.39,12.0 +7.5,0.24,0.62,10.6,0.045,51.0,153.0,0.99779,3.16,0.44,8.8 +7.5,0.26,0.59,11.8,0.046,58.0,164.0,0.99814,3.17,0.46,8.9 +6.6,0.4,0.32,1.7,0.035,39.0,84.0,0.99096,3.59,0.48,12.7 +8.0,0.2,0.3,8.1,0.037,42.0,130.0,0.99379,3.1,0.67,11.8 +4.6,0.445,0.0,1.4,0.053,11.0,178.0,0.99426,3.79,0.55,10.2 +6.1,0.41,0.04,1.3,0.036,23.0,121.0,0.99228,3.24,0.61,9.9 +7.6,0.2,0.34,1.8,0.041,42.0,148.0,0.99335,3.35,0.66,11.1 +6.9,0.3,0.21,7.2,0.045,54.0,190.0,0.99595,3.22,0.48,9.4 +7.0,0.35,0.17,1.1,0.049,7.0,119.0,0.99297,3.13,0.36,9.7 +6.9,0.35,0.55,11.95,0.038,22.0,111.0,0.99687,3.11,0.29,9.7 
+7.0,0.35,0.17,1.1,0.049,7.0,119.0,0.99297,3.13,0.36,9.7 +6.9,0.35,0.55,11.95,0.038,22.0,111.0,0.99687,3.11,0.29,9.7 +7.6,0.3,0.4,2.2,0.054,29.0,175.0,0.99445,3.19,0.53,9.8 +7.5,0.38,0.29,12.7,0.05,25.0,209.0,0.9986,3.25,0.59,9.3 +7.5,0.3,0.32,1.4,0.032,31.0,161.0,0.99154,2.95,0.42,10.5 +6.3,0.4,0.32,10.6,0.049,38.0,209.0,0.9981,3.47,0.59,9.3 +6.8,0.37,0.28,1.9,0.024,64.0,106.0,0.98993,3.45,0.6,12.6 +7.5,0.23,0.35,17.8,0.058,128.0,212.0,1.00241,3.44,0.43,8.9 +8.3,0.27,0.34,10.2,0.048,50.0,118.0,0.99716,3.18,0.51,10.3 +6.8,0.26,0.22,4.8,0.041,110.0,198.0,0.99437,3.29,0.67,10.6 +6.5,0.28,0.35,9.8,0.067,61.0,180.0,0.9972,3.15,0.57,9.0 +7.2,0.34,0.3,8.4,0.051,40.0,167.0,0.99756,3.48,0.62,9.7 +7.0,0.23,0.26,7.2,0.041,21.0,90.0,0.99509,3.22,0.55,9.5 +7.7,0.29,0.29,4.8,0.06,27.0,156.0,0.99572,3.49,0.59,10.3 +7.2,0.34,0.3,8.4,0.051,40.0,167.0,0.99756,3.48,0.62,9.7 +7.7,0.4,0.27,4.5,0.034,27.0,95.0,0.99175,3.21,0.59,12.3 +6.7,0.17,0.27,1.4,0.032,39.0,149.0,0.99254,3.4,0.52,10.5 +7.0,0.23,0.26,7.2,0.041,21.0,90.0,0.99509,3.22,0.55,9.5 +8.1,0.24,0.26,11.0,0.043,41.0,211.0,0.99676,3.11,0.49,10.0 +7.7,0.28,0.63,11.1,0.039,58.0,179.0,0.9979,3.08,0.44,8.8 +7.5,0.23,0.29,2.6,0.031,24.0,98.0,0.99194,3.0,0.54,10.9 +8.3,0.26,0.31,2.0,0.029,14.0,141.0,0.99077,2.95,0.77,12.2 +7.9,0.46,0.4,10.1,0.168,19.0,184.0,0.99782,3.06,0.62,9.5 +7.9,0.31,0.22,13.3,0.048,46.0,212.0,0.99942,3.47,0.59,10.0 +7.9,0.25,0.34,11.4,0.04,53.0,202.0,0.99708,3.11,0.57,9.6 +6.1,0.28,0.16,1.3,0.06,36.0,126.0,0.99353,3.13,0.46,8.7 +7.0,0.18,0.26,1.4,0.044,46.0,89.0,0.99256,3.39,0.48,10.7 +6.5,0.21,0.28,1.4,0.046,26.0,66.0,0.99199,3.43,0.48,11.1 +7.6,0.48,0.33,7.0,0.024,14.0,130.0,0.9918,3.25,0.45,12.5 +7.1,0.34,0.32,2.0,0.051,29.0,130.0,0.99354,3.3,0.5,10.4 +8.9,0.21,0.37,1.2,0.028,20.0,93.0,0.99244,3.2,0.37,11.5 +7.4,0.32,0.27,12.9,0.04,60.0,221.0,0.99831,3.05,0.66,9.4 +6.0,0.495,0.27,5.0,0.157,17.0,129.0,0.99396,3.03,0.36,9.3 +8.1,0.25,0.34,10.1,0.05,30.0,121.0,0.99724,3.17,0.49,10.1 +8.2,0.25,0.46,3.75,0.05,14.0,102.0,0.99524,3.28,0.58,9.7 +6.5,0.18,0.29,1.7,0.035,39.0,144.0,0.9927,3.49,0.5,10.5 +6.7,0.24,0.26,12.6,0.053,44.0,182.0,0.99802,3.42,0.42,9.7 +6.6,0.32,0.24,1.3,0.06,42.5,204.0,0.99512,3.59,0.51,9.2 +7.6,0.32,0.35,1.6,0.092,24.0,138.0,0.99438,3.19,0.44,9.8 +7.4,0.33,0.44,7.6,0.05,40.0,227.0,0.99679,3.12,0.52,9.0 +7.2,0.3,0.3,8.1,0.05,40.0,188.0,0.99652,3.15,0.49,9.1 +7.4,0.34,0.3,14.9,0.037,70.0,169.0,0.99698,3.25,0.37,10.4 +6.1,0.16,0.29,6.0,0.03,29.0,144.0,0.99474,3.68,0.46,10.7 +6.3,0.1,0.24,6.0,0.039,25.0,107.0,0.99511,3.59,0.49,10.5 +6.2,0.45,0.73,7.2,0.099,47.0,202.0,0.99582,3.21,0.43,9.2 +6.0,0.33,0.18,3.0,0.036,5.0,85.0,0.99125,3.28,0.4,11.5 +7.6,0.48,0.37,1.2,0.034,5.0,57.0,0.99256,3.05,0.54,10.4 +7.2,0.2,0.3,2.0,0.039,43.0,188.0,0.9911,3.3,0.41,12.0 +7.0,0.32,0.29,4.9,0.036,41.0,150.0,0.99168,3.38,0.43,12.2 +7.2,0.2,0.3,2.0,0.039,43.0,188.0,0.9911,3.3,0.41,12.0 +7.0,0.22,0.29,8.9,0.05,24.0,90.0,0.99556,3.29,0.46,9.8 +9.4,0.23,0.56,16.45,0.063,52.5,282.0,1.00098,3.1,0.51,9.3 +6.4,0.27,0.19,2.0,0.084,21.0,191.0,0.99516,3.49,0.63,9.6 +6.4,0.27,0.19,1.9,0.085,21.0,196.0,0.99516,3.49,0.64,9.5 +7.0,0.23,0.42,5.1,0.042,37.0,144.0,0.99518,3.5,0.59,10.2 +6.9,0.15,0.28,4.4,0.029,14.0,107.0,0.99347,3.24,0.46,10.4 +6.7,0.26,0.29,5.8,0.025,26.0,74.0,0.9929,3.28,0.53,11.0 +6.9,0.15,0.28,4.4,0.029,14.0,107.0,0.99347,3.24,0.46,10.4 +7.6,0.2,0.68,12.9,0.042,56.0,160.0,0.99841,3.05,0.41,8.7 +6.9,0.3,0.29,1.3,0.053,24.0,189.0,0.99362,3.29,0.54,9.9 +6.9,0.3,0.3,1.3,0.053,24.0,186.0,0.99361,3.29,0.54,9.9 
+7.6,0.21,0.35,1.2,0.041,7.0,106.0,0.9914,3.06,0.45,11.3 +6.8,0.46,0.26,2.7,0.042,28.0,83.0,0.99114,3.38,0.51,12.0 +7.0,0.28,0.26,1.7,0.042,34.0,130.0,0.9925,3.43,0.5,10.7 +6.5,0.24,0.29,8.2,0.043,32.0,156.0,0.99453,3.13,0.7,10.1 +6.4,0.17,0.34,1.5,0.091,42.0,135.0,0.9938,3.25,0.49,9.6 +6.4,0.17,0.34,1.5,0.093,43.0,136.0,0.9938,3.25,0.49,9.6 +6.3,0.695,0.55,12.9,0.056,58.0,252.0,0.99806,3.29,0.49,8.7 +7.0,0.27,0.29,3.9,0.059,28.0,199.0,0.9961,3.54,0.59,10.3 +8.4,0.3,0.25,17.75,0.047,25.0,218.0,1.00016,2.98,0.66,9.1 +6.5,0.19,0.27,4.9,0.037,13.0,101.0,0.9916,3.17,0.41,11.8 +8.0,0.36,0.39,1.6,0.024,26.0,93.0,0.99116,3.15,0.49,11.9 +6.1,0.16,0.24,1.4,0.046,17.0,77.0,0.99319,3.66,0.57,10.3 +9.2,0.19,0.42,2.0,0.047,16.0,104.0,0.99517,3.09,0.66,10.0 +9.2,0.16,0.49,2.0,0.044,18.0,107.0,0.99514,3.1,0.53,10.2 +8.0,0.26,0.28,8.2,0.038,72.0,202.0,0.99566,3.12,0.56,10.0 +8.8,0.33,0.36,2.1,0.034,19.0,125.0,0.99166,2.96,0.98,12.7 +9.8,0.16,0.46,1.8,0.046,23.0,130.0,0.99587,3.04,0.67,9.6 +6.6,0.23,0.18,8.5,0.044,59.0,188.0,0.99558,3.16,0.49,9.5 +7.9,0.44,0.26,4.45,0.033,23.0,100.0,0.99117,3.17,0.52,12.7 +7.6,0.31,0.27,5.8,0.036,23.0,109.0,0.99399,3.34,0.54,11.0 +7.5,0.705,0.1,13.0,0.044,44.0,214.0,0.99741,3.1,0.5,9.1 +7.1,0.21,0.28,2.7,0.034,23.0,111.0,0.99405,3.35,0.64,10.2 +7.0,0.16,0.26,7.3,0.047,30.0,220.0,0.99622,3.38,0.58,10.1 +8.0,0.27,0.25,19.1,0.045,50.0,208.0,1.00051,3.05,0.5,9.2 +6.3,0.38,0.17,8.8,0.08,50.0,212.0,0.99803,3.47,0.66,9.4 +7.1,0.21,0.28,2.7,0.034,23.0,111.0,0.99405,3.35,0.64,10.2 +6.2,0.38,0.18,7.4,0.095,28.0,195.0,0.99773,3.53,0.71,9.2 +8.2,0.24,0.3,2.3,0.05,23.0,106.0,0.99397,2.98,0.5,10.0 +7.0,0.16,0.26,6.85,0.047,30.0,220.0,0.99622,3.38,0.58,10.1 +7.3,0.815,0.09,11.4,0.044,45.0,204.0,0.99713,3.15,0.46,9.0 +6.3,0.41,0.16,0.9,0.032,25.0,98.0,0.99274,3.16,0.42,9.5 +6.1,0.36,0.41,19.35,0.07,67.0,207.0,1.00118,3.39,0.53,9.1 +8.1,0.4,0.32,7.9,0.031,23.0,118.0,0.99176,3.05,0.46,13.3 +6.8,0.26,0.43,11.75,0.045,53.0,198.0,0.9969,3.26,0.55,9.5 +6.2,0.44,0.18,7.7,0.096,28.0,210.0,0.99771,3.56,0.72,9.2 +7.2,0.24,0.29,3.0,0.036,17.0,117.0,0.99411,3.36,0.68,10.1 +6.2,0.44,0.18,7.7,0.096,28.0,210.0,0.99771,3.56,0.72,9.2 +7.2,0.24,0.29,3.0,0.036,17.0,117.0,0.99411,3.36,0.68,10.1 +7.3,0.22,0.26,1.5,0.04,32.0,172.0,0.99194,3.27,0.48,11.2 +8.1,0.34,0.28,7.5,0.04,70.0,230.0,0.99558,3.14,0.55,9.8 +7.3,0.22,0.26,1.5,0.04,32.0,172.0,0.99194,3.27,0.48,11.2 +8.1,0.34,0.28,7.5,0.04,70.0,230.0,0.99558,3.14,0.55,9.8 +6.4,0.28,0.17,8.3,0.042,61.0,195.0,0.99577,3.22,0.46,9.4 +6.3,0.29,0.14,7.05,0.045,50.0,177.0,0.99564,3.23,0.42,9.0 +6.4,0.27,0.17,8.4,0.044,60.0,198.0,0.99578,3.21,0.47,9.4 +7.4,0.35,0.2,13.9,0.054,63.0,229.0,0.99888,3.11,0.5,8.9 +8.3,0.28,0.27,17.5,0.045,48.0,253.0,1.00014,3.02,0.56,9.1 +6.4,0.35,0.35,5.6,0.034,9.0,148.0,0.99441,3.17,0.5,9.8 +6.9,0.43,0.28,9.4,0.056,29.0,183.0,0.99594,3.17,0.43,9.4 +8.0,0.26,0.28,4.8,0.05,34.0,150.0,0.99437,3.13,0.5,10.0 +6.9,0.43,0.28,9.4,0.056,29.0,183.0,0.99594,3.17,0.43,9.4 +7.3,0.27,0.37,9.7,0.042,36.0,130.0,0.9979,3.48,0.75,9.9 +6.8,0.46,0.26,6.3,0.147,49.0,159.0,0.99434,3.14,0.47,10.0 +7.2,0.2,0.28,1.6,0.028,13.0,168.0,0.99203,3.17,1.06,11.5 +7.6,0.285,0.32,14.6,0.063,32.0,201.0,0.998,3.0,0.45,9.2 +6.6,0.32,0.33,2.5,0.052,40.0,219.5,0.99316,3.15,0.6,10.0 +7.6,0.285,0.32,14.6,0.063,32.0,201.0,0.998,3.0,0.45,9.2 +6.6,0.34,0.34,2.6,0.051,40.5,210.0,0.99314,3.15,0.61,10.0 +6.6,0.32,0.33,2.5,0.052,40.0,210.0,0.99316,3.15,0.6,10.0 +6.5,0.27,0.26,8.2,0.042,21.0,133.0,0.99612,3.43,0.64,9.8 +6.6,0.26,0.27,1.5,0.04,19.0,114.0,0.99295,3.36,0.62,10.5 
+6.7,0.27,0.26,2.3,0.043,61.0,181.0,0.99394,3.45,0.63,10.6 +6.6,0.56,0.15,10.0,0.037,38.0,157.0,0.99642,3.28,0.52,9.4 +6.6,0.56,0.15,10.0,0.037,38.0,157.0,0.99642,3.28,0.52,9.4 +7.3,0.19,0.27,1.6,0.027,35.0,136.0,0.99248,3.38,0.54,11.0 +6.3,0.2,0.26,1.6,0.027,36.0,141.0,0.99268,3.53,0.56,10.8 +7.1,0.29,0.3,16.0,0.036,58.0,201.0,0.99954,3.3,0.67,9.0 +7.8,0.32,0.33,10.4,0.031,47.0,194.0,0.99692,3.07,0.58,9.6 +8.1,0.33,0.36,7.4,0.037,36.0,156.0,0.99592,3.19,0.54,10.6 +8.1,0.33,0.36,7.4,0.037,36.0,156.0,0.99592,3.19,0.54,10.6 +7.8,0.32,0.33,10.4,0.031,47.0,194.0,0.99692,3.07,0.58,9.6 +6.6,0.33,0.24,16.05,0.045,31.0,147.0,0.99822,3.08,0.52,9.2 +6.6,0.33,0.24,16.05,0.045,31.0,147.0,0.99822,3.08,0.52,9.2 +8.2,0.26,0.33,2.6,0.053,11.0,71.0,0.99402,2.89,0.49,9.5 +8.3,0.25,0.33,2.5,0.053,12.0,72.0,0.99404,2.89,0.48,9.5 +7.0,0.26,0.26,10.8,0.039,37.0,184.0,0.99787,3.47,0.58,10.3 +6.0,0.26,0.15,1.2,0.053,35.0,124.0,0.99347,3.08,0.46,8.8 +7.5,0.28,0.78,12.1,0.041,53.0,161.0,0.99838,2.98,0.44,8.7 +7.5,0.27,0.79,11.95,0.04,51.0,159.0,0.99839,2.98,0.44,8.7 +7.0,0.28,0.32,1.7,0.038,27.0,128.0,0.99375,3.2,0.62,10.2 +5.2,0.16,0.34,0.8,0.029,26.0,77.0,0.99155,3.25,0.51,10.1 +6.8,0.34,0.1,1.4,0.049,29.0,118.0,0.9936,3.21,0.41,9.5 +7.6,0.25,0.34,1.3,0.056,34.0,176.0,0.99434,3.1,0.51,9.5 +5.6,0.35,0.4,6.3,0.022,23.0,174.0,0.9922,3.54,0.5,11.6 +8.8,0.24,0.23,10.3,0.032,12.0,97.0,0.99571,3.13,0.4,10.7 +6.0,0.29,0.21,15.55,0.043,20.0,142.0,0.99658,3.11,0.54,10.1 +6.1,0.27,0.31,1.5,0.035,17.0,83.0,0.99076,3.32,0.44,11.1 +7.4,0.56,0.09,1.5,0.071,19.0,117.0,0.99496,3.22,0.53,9.8 +6.8,0.29,0.49,1.4,0.142,52.0,148.0,0.9937,3.08,0.49,9.0 +6.1,0.27,0.31,1.5,0.035,17.0,83.0,0.99076,3.32,0.44,11.1 +6.3,0.27,0.37,7.9,0.047,58.0,215.0,0.99542,3.19,0.48,9.5 +6.6,0.24,0.3,13.0,0.052,18.0,143.0,0.99825,3.37,0.49,9.4 +6.8,0.32,0.3,1.0,0.049,22.0,113.0,0.99289,3.24,0.61,10.2 +6.4,0.37,0.37,4.85,0.041,39.5,216.5,0.99432,3.1,0.5,9.8 +6.2,0.26,0.37,7.1,0.047,54.0,201.0,0.99523,3.19,0.48,9.5 +6.3,0.27,0.37,7.9,0.047,58.0,215.0,0.99542,3.19,0.48,9.5 +6.4,0.3,0.16,7.5,0.05,55.0,191.0,0.9959,3.17,0.49,9.0 +8.0,0.28,0.32,7.6,0.045,61.0,204.0,0.99543,3.1,0.55,10.1 +6.7,0.24,0.32,10.3,0.079,37.0,122.0,0.99662,3.02,0.45,8.8 +7.9,0.27,0.27,1.7,0.034,25.0,122.0,0.99088,2.97,0.51,11.9 +7.9,0.27,0.27,1.7,0.034,25.0,122.0,0.99088,2.97,0.51,11.9 +6.1,0.28,0.24,19.95,0.074,32.0,174.0,0.99922,3.19,0.44,9.3 +7.7,0.39,0.49,7.7,0.036,11.0,110.0,0.9966,3.33,0.76,10.0 +6.0,0.2,0.24,5.3,0.075,49.0,201.0,0.99466,3.21,0.43,9.5 +6.1,0.28,0.24,19.95,0.074,32.0,174.0,0.99922,3.19,0.44,9.3 +7.6,0.31,0.23,12.7,0.054,20.0,139.0,0.99836,3.16,0.5,9.7 +7.6,0.31,0.23,12.7,0.054,20.0,139.0,0.99836,3.16,0.5,9.7 +6.3,0.18,0.22,1.5,0.043,45.0,155.0,0.99238,3.19,0.48,10.2 +8.6,0.23,0.25,11.3,0.031,13.0,96.0,0.99645,3.11,0.4,10.8 +6.8,0.21,0.36,18.1,0.046,32.0,133.0,1.0,3.27,0.48,8.8 +6.8,0.21,0.36,18.1,0.046,32.0,133.0,1.0,3.27,0.48,8.8 +6.9,0.26,0.31,7.0,0.039,37.0,175.0,0.99376,3.32,0.49,11.4 +6.8,0.21,0.36,18.1,0.046,32.0,133.0,1.0,3.27,0.48,8.8 +6.4,0.31,0.4,6.4,0.039,39.0,191.0,0.99513,3.14,0.52,9.8 +8.6,0.34,0.36,1.4,0.045,11.0,119.0,0.99556,3.17,0.47,9.4 +8.6,0.34,0.36,1.4,0.045,11.0,119.0,0.99556,3.17,0.47,9.4 +8.5,0.3,0.28,3.1,0.054,54.0,174.0,0.99543,3.21,0.43,9.4 +7.4,0.4,0.41,14.1,0.053,37.0,194.0,0.99886,3.2,0.63,9.4 +6.6,0.32,0.34,7.7,0.044,63.0,212.0,0.99526,3.22,0.48,9.7 +7.1,0.34,0.31,5.2,0.032,36.0,140.0,0.99166,3.35,0.47,12.3 +6.6,0.26,0.25,11.6,0.045,45.0,178.0,0.99691,3.33,0.43,9.8 +8.0,0.27,0.57,10.4,0.053,18.0,134.0,0.99732,3.12,0.68,9.0 
+6.2,0.28,0.45,7.5,0.045,46.0,203.0,0.99573,3.26,0.46,9.2 +6.2,0.3,0.49,11.2,0.058,68.0,215.0,0.99656,3.19,0.6,9.4 +5.6,0.175,0.29,0.8,0.043,20.0,67.0,0.99112,3.28,0.48,9.9 +6.9,0.34,0.36,1.4,0.032,13.0,145.0,0.99214,3.07,0.52,9.8 +6.9,0.34,0.3,4.7,0.029,34.0,148.0,0.99165,3.36,0.49,12.3 +7.1,0.12,0.3,3.1,0.018,15.0,37.0,0.99004,3.02,0.52,11.9 +7.1,0.32,0.29,4.0,0.038,33.0,170.0,0.99463,3.27,0.64,10.2 +7.3,0.51,0.29,11.3,0.034,61.0,224.0,0.99683,3.14,0.56,9.5 +7.1,0.12,0.3,3.1,0.018,15.0,37.0,0.99004,3.02,0.52,11.9 +6.3,0.24,0.55,8.1,0.04,67.0,216.0,0.99596,3.24,0.5,9.2 +7.5,0.41,0.23,14.8,0.054,28.0,174.0,0.99898,3.18,0.49,9.7 +6.5,0.18,0.33,1.4,0.029,35.0,138.0,0.99114,3.36,0.6,11.5 +7.3,0.17,0.24,8.1,0.121,32.0,162.0,0.99508,3.17,0.38,10.4 +8.2,0.2,0.38,3.5,0.053,41.0,174.0,0.99306,3.22,0.41,11.6 +7.5,0.41,0.23,14.8,0.054,28.0,174.0,0.99898,3.18,0.49,9.7 +7.3,0.17,0.24,8.1,0.121,32.0,162.0,0.99508,3.17,0.38,10.4 +6.5,0.18,0.33,1.4,0.029,35.0,138.0,0.99114,3.36,0.6,11.5 +7.3,0.16,0.35,1.5,0.036,29.0,108.0,0.99342,3.27,0.51,10.2 +6.4,0.16,0.37,1.5,0.037,27.0,109.0,0.99345,3.38,0.5,9.8 +6.6,0.42,0.13,12.8,0.044,26.0,158.0,0.99772,3.24,0.47,9.0 +5.8,0.3,0.12,1.6,0.036,57.0,163.0,0.99239,3.38,0.59,10.5 +6.7,0.54,0.27,7.1,0.049,8.0,178.0,0.99502,3.16,0.38,9.4 +6.7,0.54,0.27,7.1,0.049,8.0,178.0,0.99502,3.16,0.38,9.4 +6.4,0.22,0.3,11.2,0.046,53.0,149.0,0.99479,3.21,0.34,10.8 +6.8,0.23,0.3,1.7,0.043,19.0,95.0,0.99207,3.17,0.46,10.7 +9.0,0.26,0.34,6.7,0.029,21.0,162.0,0.99497,3.08,0.5,10.6 +6.5,0.23,0.25,17.3,0.046,15.0,110.0,0.99828,3.15,0.42,9.2 +5.9,0.28,0.14,8.6,0.032,30.0,142.0,0.99542,3.28,0.44,9.5 +5.9,0.28,0.14,8.6,0.032,30.0,142.0,0.99542,3.28,0.44,9.5 +6.2,0.27,0.18,1.5,0.028,20.0,111.0,0.99228,3.41,0.5,10.0 +9.0,0.29,0.34,12.1,0.03,34.0,177.0,0.99706,3.13,0.47,10.6 +9.0,0.26,0.34,6.7,0.029,21.0,162.0,0.99497,3.08,0.5,10.6 +8.9,0.27,0.34,10.7,0.029,19.5,166.0,0.99669,3.13,0.48,10.6 +6.5,0.23,0.25,17.3,0.046,15.0,110.0,0.99828,3.15,0.42,9.2 +6.9,0.32,0.3,1.8,0.036,28.0,117.0,0.99269,3.24,0.48,11.0 +7.2,0.22,0.24,1.4,0.041,17.0,159.0,0.99196,3.25,0.53,11.2 +6.7,0.5,0.38,7.5,0.046,26.0,175.0,0.99662,3.32,0.54,9.6 +6.2,0.33,0.14,4.8,0.052,27.0,128.0,0.99475,3.21,0.48,9.4 +6.3,0.26,0.42,7.1,0.045,62.0,209.0,0.99544,3.2,0.53,9.5 +7.5,0.2,0.47,16.9,0.052,51.0,188.0,0.99944,3.09,0.62,9.3 +6.2,0.33,0.14,4.8,0.052,27.0,128.0,0.99475,3.21,0.48,9.4 +6.3,0.26,0.42,7.1,0.045,62.0,209.0,0.99544,3.2,0.53,9.5 +6.6,0.36,0.52,11.3,0.046,8.0,110.0,0.9966,3.07,0.46,9.4 +6.3,0.13,0.42,1.1,0.043,63.0,146.0,0.99066,3.13,0.72,11.2 +6.4,0.15,0.44,1.2,0.043,67.0,150.0,0.9907,3.14,0.73,11.2 +6.3,0.13,0.42,1.1,0.043,63.0,146.0,0.99066,3.13,0.72,11.2 +7.6,0.23,0.64,12.9,0.033,54.0,170.0,0.998,3.0,0.53,8.8 +6.4,0.15,0.44,1.2,0.043,67.0,150.0,0.9907,3.14,0.73,11.2 +6.3,0.13,0.42,1.1,0.043,63.0,146.0,0.99066,3.13,0.72,11.2 +5.7,0.255,0.65,1.2,0.079,17.0,137.0,0.99307,3.2,0.42,9.4 +6.9,0.32,0.26,2.3,0.03,11.0,103.0,0.99106,3.06,0.42,11.1 +6.9,0.28,0.22,10.0,0.052,36.0,131.0,0.99696,3.08,0.46,9.6 +6.9,0.32,0.26,2.3,0.03,11.0,103.0,0.99106,3.06,0.42,11.1 +5.7,0.255,0.65,1.2,0.079,17.0,137.0,0.99307,3.2,0.42,9.4 +6.6,0.41,0.16,1.4,0.037,28.0,160.0,0.99167,2.95,0.45,10.6 +7.3,0.37,0.16,14.9,0.048,59.0,240.0,0.99902,3.13,0.45,8.9 +6.9,0.21,0.24,1.8,0.021,17.0,80.0,0.98992,3.15,0.46,12.3 +6.6,0.24,0.28,1.8,0.028,39.0,132.0,0.99182,3.34,0.46,11.4 +6.8,0.28,0.36,7.0,0.043,60.0,207.0,0.99556,3.16,0.49,9.6 +6.6,0.24,0.24,8.6,0.034,25.0,135.0,0.99582,3.33,0.59,10.3 +6.6,0.24,0.28,1.8,0.028,39.0,132.0,0.99182,3.34,0.46,11.4 
+7.0,0.16,0.32,1.1,0.032,29.0,80.0,0.98972,3.23,0.36,12.1 +7.0,0.14,0.28,1.3,0.026,10.0,56.0,0.99352,3.46,0.45,9.9 +6.3,0.34,0.36,4.9,0.035,31.0,185.0,0.9946,3.15,0.49,9.7 +6.8,0.26,0.24,1.9,0.043,70.0,154.0,0.99273,3.18,0.52,10.5 +6.7,0.17,0.42,10.4,0.038,85.0,182.0,0.99628,3.04,0.44,8.9 +6.5,0.27,0.4,10.0,0.039,74.0,227.0,0.99582,3.18,0.5,9.4 +6.7,0.25,0.36,8.6,0.037,63.0,206.0,0.99553,3.18,0.5,9.6 +5.8,0.3,0.27,1.7,0.014,45.0,104.0,0.98914,3.4,0.56,12.6 +6.4,0.28,0.56,1.7,0.156,49.0,106.0,0.99354,3.1,0.37,9.2 +7.7,0.3,0.26,18.95,0.053,36.0,174.0,0.99976,3.2,0.5,10.4 +6.8,0.18,0.3,12.8,0.062,19.0,171.0,0.99808,3.0,0.52,9.0 +6.8,0.18,0.3,12.8,0.062,19.0,171.0,0.99808,3.0,0.52,9.0 +6.8,0.18,0.3,12.8,0.062,19.0,171.0,0.99808,3.0,0.52,9.0 +6.8,0.18,0.3,12.8,0.062,19.0,171.0,0.99808,3.0,0.52,9.0 +6.8,0.18,0.3,12.8,0.062,19.0,171.0,0.99808,3.0,0.52,9.0 +6.8,0.18,0.3,12.8,0.062,19.0,171.0,0.99808,3.0,0.52,9.0 +5.1,0.14,0.25,0.7,0.039,15.0,89.0,0.9919,3.22,0.43,9.2 +6.8,0.18,0.3,12.8,0.062,19.0,171.0,0.99808,3.0,0.52,9.0 +7.2,0.615,0.1,1.4,0.068,25.0,154.0,0.99499,3.2,0.48,9.7 +6.9,0.13,0.28,13.3,0.05,47.0,132.0,0.99655,3.34,0.42,10.1 +6.7,0.34,0.3,8.5,0.059,24.0,152.0,0.99615,3.46,0.64,11.0 +7.3,0.32,0.29,1.5,0.038,32.0,144.0,0.99296,3.2,0.55,10.8 +6.3,0.21,0.29,11.7,0.048,49.0,147.0,0.99482,3.22,0.38,10.8 +5.4,0.5,0.13,5.0,0.028,12.0,107.0,0.99079,3.48,0.88,13.5 +8.2,0.52,0.34,1.2,0.042,18.0,167.0,0.99366,3.24,0.39,10.6 +7.8,0.28,0.31,2.1,0.046,28.0,208.0,0.99434,3.23,0.64,9.8 +6.4,0.22,0.34,1.4,0.023,56.0,115.0,0.98958,3.18,0.7,11.7 +7.8,0.28,0.31,2.1,0.046,28.0,208.0,0.99434,3.23,0.64,9.8 +6.9,0.32,0.27,16.0,0.034,58.0,185.0,0.99938,3.34,0.6,9.0 +6.8,0.11,0.42,1.1,0.042,51.0,132.0,0.99059,3.18,0.74,11.3 +6.2,0.26,0.32,15.3,0.031,64.0,185.0,0.99835,3.31,0.61,9.4 +6.4,0.22,0.34,1.4,0.023,56.0,115.0,0.98958,3.18,0.7,11.7 +6.7,0.3,0.29,2.8,0.025,37.0,107.0,0.99159,3.31,0.63,11.3 +6.7,0.3,0.29,2.8,0.025,37.0,107.0,0.99159,3.31,0.63,11.3 +7.1,0.2,0.3,0.9,0.019,4.0,28.0,0.98931,3.2,0.36,12.0 +7.2,0.2,0.36,2.5,0.028,22.0,157.0,0.9938,3.48,0.49,10.6 +8.9,0.26,0.33,8.1,0.024,47.0,202.0,0.99558,3.13,0.46,10.8 +7.5,0.25,0.32,8.2,0.024,53.0,209.0,0.99563,3.12,0.46,10.8 +7.1,0.2,0.3,0.9,0.019,4.0,28.0,0.98931,3.2,0.36,12.0 +6.3,0.27,0.46,11.1,0.053,44.0,177.0,0.99691,3.18,0.67,9.4 +6.5,0.3,0.39,7.8,0.038,61.0,219.0,0.9959,3.19,0.5,9.4 +6.7,0.3,0.29,2.8,0.025,37.0,107.0,0.99159,3.31,0.63,11.3 +6.6,0.36,0.52,10.1,0.05,29.0,140.0,0.99628,3.07,0.4,9.4 +6.15,0.21,0.37,3.2,0.021,20.0,80.0,0.99076,3.39,0.47,12.0 +6.5,0.18,0.41,14.2,0.039,47.0,129.0,0.99678,3.28,0.72,10.3 +6.5,0.18,0.41,14.2,0.039,47.0,129.0,0.99678,3.28,0.72,10.3 +6.5,0.18,0.41,14.2,0.039,47.0,129.0,0.99678,3.28,0.72,10.3 +6.6,0.26,0.21,2.9,0.026,48.0,126.0,0.99089,3.22,0.38,11.3 +6.6,0.35,0.35,6.0,0.063,31.0,150.0,0.99537,3.1,0.47,9.4 +6.5,0.28,0.28,20.4,0.041,40.0,144.0,1.0002,3.14,0.38,8.7 +6.6,0.36,0.52,10.1,0.05,29.0,140.0,0.99628,3.07,0.4,9.4 +6.6,0.26,0.21,2.9,0.026,48.0,126.0,0.99089,3.22,0.38,11.3 +6.5,0.18,0.41,14.2,0.039,47.0,129.0,0.99678,3.28,0.72,10.3 +6.15,0.21,0.37,3.2,0.021,20.0,80.0,0.99076,3.39,0.47,12.0 +4.5,0.19,0.21,0.95,0.033,89.0,159.0,0.99332,3.34,0.42,8.0 +8.0,0.24,0.26,1.7,0.033,36.0,136.0,0.99316,3.44,0.51,10.4 +7.8,0.17,0.23,1.7,0.029,39.0,128.0,0.99272,3.37,0.41,10.7 +7.0,0.24,0.24,9.0,0.03,42.0,219.0,0.99636,3.47,0.46,10.2 +5.8,0.6,0.0,1.3,0.044,72.0,197.0,0.99202,3.56,0.43,10.9 +5.9,0.445,0.26,1.4,0.027,23.0,109.0,0.99148,3.3,0.36,10.5 +6.7,0.28,0.28,2.4,0.012,36.0,100.0,0.99064,3.26,0.39,11.7 
+6.8,0.44,0.2,16.0,0.065,61.0,186.0,0.99884,3.13,0.45,8.6 +7.2,0.24,0.27,11.4,0.034,40.0,174.0,0.99773,3.2,0.44,9.0 +8.7,0.31,0.73,14.35,0.044,27.0,191.0,1.00013,2.96,0.88,8.7 +8.2,0.32,0.26,2.1,0.062,26.0,87.0,0.98974,3.1,0.47,12.8 +7.2,0.24,0.27,11.4,0.034,40.0,174.0,0.99773,3.2,0.44,9.0 +8.7,0.31,0.73,14.35,0.044,27.0,191.0,1.00013,2.96,0.88,8.7 +7.5,0.13,0.38,1.1,0.023,42.0,104.0,0.99112,3.28,0.53,11.8 +9.2,0.14,0.37,1.1,0.034,36.0,84.0,0.99136,3.05,0.55,11.6 +7.4,0.2,0.37,1.2,0.028,28.0,89.0,0.99132,3.14,0.61,11.8 +6.1,0.15,0.35,15.8,0.042,55.0,158.0,0.99642,3.24,0.37,10.6 +7.6,0.23,0.4,5.2,0.066,14.0,91.0,0.99488,3.17,0.8,9.7 +8.1,0.33,0.22,5.2,0.047,24.0,151.0,0.99527,3.22,0.47,10.3 +7.15,0.17,0.24,9.6,0.119,56.0,178.0,0.99578,3.15,0.44,10.2 +6.7,0.12,0.3,5.2,0.048,38.0,113.0,0.99352,3.33,0.44,10.1 +5.7,0.18,0.36,1.2,0.046,9.0,71.0,0.99199,3.7,0.68,10.9 +5.8,0.15,0.28,0.8,0.037,43.0,127.0,0.99198,3.24,0.51,9.3 +6.6,0.23,0.29,14.45,0.057,29.0,144.0,0.99756,3.33,0.54,10.2 +7.15,0.17,0.24,9.6,0.119,56.0,178.0,0.99578,3.15,0.44,10.2 +7.0,0.34,0.39,6.9,0.066,43.0,162.0,0.99561,3.11,0.53,9.5 +6.4,0.68,0.26,3.4,0.069,25.0,146.0,0.99347,3.18,0.4,9.3 +7.3,0.22,0.31,2.3,0.018,45.0,80.0,0.98936,3.06,0.34,12.9 +6.4,0.28,0.27,11.0,0.042,45.0,148.0,0.99786,3.14,0.46,8.7 +6.9,0.4,0.22,5.95,0.081,76.0,303.0,0.99705,3.4,0.57,9.4 +6.8,0.19,0.23,5.1,0.034,71.0,204.0,0.9942,3.23,0.69,10.1 +7.1,0.23,0.24,5.4,0.039,60.0,196.0,0.9948,3.19,0.78,10.0 +6.45,0.14,0.42,1.2,0.05,51.0,129.0,0.99116,3.27,0.69,11.1 +6.5,0.15,0.44,12.6,0.052,65.0,158.0,0.99688,3.26,0.7,10.3 +7.1,0.15,0.34,1.0,0.033,27.0,73.0,0.98974,3.24,0.41,12.2 +6.7,0.33,0.34,6.6,0.067,35.0,156.0,0.99542,3.11,0.48,9.3 +7.2,0.3,0.26,1.5,0.041,46.0,178.0,0.99154,3.19,0.56,11.3 +7.0,0.23,0.33,1.0,0.043,46.0,110.0,0.99118,3.04,0.65,10.8 +8.0,0.13,0.25,1.1,0.033,15.0,86.0,0.99044,2.98,0.39,11.2 +6.2,0.21,0.34,6.6,0.03,36.0,91.0,0.9914,3.32,0.45,12.5 +8.3,0.4,0.41,8.2,0.05,15.0,122.0,0.9979,3.39,0.49,9.3 +5.9,0.34,0.31,2.0,0.03,38.0,142.0,0.98892,3.4,0.41,12.9 +6.6,0.12,0.25,1.4,0.039,21.0,131.0,0.99114,3.2,0.45,11.2 +9.6,0.655,0.21,2.0,0.039,21.0,120.0,0.99188,3.0,1.0,12.6 +6.8,0.26,0.4,7.5,0.046,45.0,179.0,0.99583,3.2,0.49,9.3 +5.9,0.34,0.31,2.0,0.03,38.0,142.0,0.98892,3.4,0.41,12.9 +5.9,0.3,0.3,2.0,0.03,38.0,142.0,0.98892,3.41,0.41,12.9 +7.0,0.15,0.3,13.3,0.049,46.0,120.0,0.99704,3.2,0.36,9.5 +7.9,0.37,0.31,2.85,0.037,5.0,24.0,0.9911,3.19,0.36,11.9 +7.2,0.35,0.25,5.6,0.032,23.0,120.0,0.99334,2.93,0.66,10.3 +7.2,0.32,0.24,5.6,0.033,23.0,120.0,0.99334,2.92,0.66,10.3 +7.6,0.1,0.33,1.0,0.031,33.0,93.0,0.99094,3.06,0.68,11.2 +6.2,0.25,0.31,3.2,0.03,32.0,150.0,0.99014,3.18,0.31,12.0 +7.1,0.31,0.17,1.0,0.042,21.0,144.0,0.99304,3.13,0.4,9.6 +7.6,0.18,0.28,7.1,0.041,29.0,110.0,0.99652,3.2,0.42,9.2 +8.0,0.17,0.29,2.4,0.029,52.0,119.0,0.98944,3.03,0.33,12.9 +7.2,0.19,0.27,11.2,0.061,46.0,149.0,0.99772,2.99,0.59,9.3 +7.6,0.32,0.25,9.5,0.03,15.0,136.0,0.99367,3.1,0.44,12.1 +7.1,0.31,0.17,1.0,0.042,21.0,144.0,0.99304,3.13,0.4,9.6 +6.6,0.21,0.29,1.8,0.026,35.0,128.0,0.99183,3.37,0.48,11.2 +7.0,0.16,0.36,2.6,0.029,28.0,98.0,0.99126,3.11,0.37,11.2 +8.0,0.17,0.29,2.4,0.029,52.0,119.0,0.98944,3.03,0.33,12.9 +6.6,0.24,0.38,8.0,0.042,56.0,187.0,0.99577,3.21,0.46,9.2 +7.2,0.19,0.27,11.2,0.061,46.0,149.0,0.99772,2.99,0.59,9.3 +7.6,0.18,0.28,7.1,0.041,29.0,110.0,0.99652,3.2,0.42,9.2 +6.9,0.3,0.25,3.3,0.041,26.0,124.0,0.99428,3.18,0.5,9.3 +6.2,0.28,0.27,10.3,0.03,26.0,108.0,0.99388,3.2,0.36,10.7 +6.9,0.31,0.32,1.2,0.024,20.0,166.0,0.99208,3.05,0.54,9.8 
+6.7,0.23,0.25,1.6,0.036,28.0,143.0,0.99256,3.3,0.54,10.3 +6.2,0.28,0.27,10.3,0.03,26.0,108.0,0.99388,3.2,0.36,10.7 +5.7,0.23,0.28,9.65,0.025,26.0,121.0,0.9925,3.28,0.38,11.3 +6.5,0.22,0.5,16.4,0.048,36.0,182.0,0.99904,3.02,0.49,8.8 +7.0,0.18,0.37,1.5,0.043,16.0,104.0,0.99216,3.18,0.5,10.8 +6.9,0.31,0.32,1.2,0.024,20.0,166.0,0.99208,3.05,0.54,9.8 +6.9,0.3,0.25,3.3,0.041,26.0,124.0,0.99428,3.18,0.5,9.3 +6.5,0.46,0.31,5.0,0.027,15.0,72.0,0.99165,3.26,0.6,11.5 +6.5,0.23,0.36,16.3,0.038,43.0,133.0,0.99924,3.26,0.41,8.8 +6.5,0.23,0.36,16.3,0.038,43.0,133.0,0.99924,3.26,0.41,8.8 +6.5,0.23,0.36,16.3,0.038,43.0,133.0,0.99924,3.26,0.41,8.8 +6.6,0.26,0.38,6.5,0.17,68.0,201.0,0.9956,3.19,0.38,9.4 +6.7,0.26,0.39,6.4,0.171,64.0,200.0,0.99562,3.19,0.38,9.4 +7.5,0.28,0.39,10.2,0.045,59.0,209.0,0.9972,3.16,0.63,9.6 +6.5,0.23,0.36,16.3,0.038,43.0,133.0,0.99924,3.26,0.41,8.8 +6.8,0.23,0.42,7.4,0.044,56.0,189.0,0.9958,3.22,0.48,9.3 +7.8,0.25,0.34,13.7,0.044,66.0,184.0,0.99976,3.22,0.75,8.9 +7.8,0.25,0.34,13.7,0.044,66.0,184.0,0.99976,3.22,0.75,8.9 +5.6,0.2,0.22,1.3,0.049,25.0,155.0,0.99296,3.74,0.43,10.0 +6.4,0.21,0.44,7.4,0.045,47.0,182.0,0.9957,3.24,0.46,9.1 +6.8,0.23,0.42,7.4,0.044,56.0,189.0,0.9958,3.22,0.48,9.3 +6.8,0.24,0.37,7.45,0.043,59.0,188.0,0.99579,3.2,0.5,9.4 +7.8,0.25,0.28,7.2,0.04,46.0,179.0,0.99541,3.14,0.6,10.1 +7.8,0.25,0.34,13.7,0.044,66.0,184.0,0.99976,3.22,0.75,8.9 +6.8,0.16,0.29,10.4,0.046,59.0,143.0,0.99518,3.2,0.4,10.8 +5.2,0.28,0.29,1.1,0.028,18.0,69.0,0.99168,3.24,0.54,10.0 +7.5,0.18,0.31,6.5,0.029,53.0,160.0,0.99276,3.03,0.38,10.9 +7.5,0.26,0.3,4.6,0.027,29.0,92.0,0.99085,3.15,0.38,12.0 +8.2,0.37,0.64,13.9,0.043,22.0,171.0,0.99873,2.99,0.8,9.3 +7.6,0.4,0.27,5.2,0.03,32.0,101.0,0.99172,3.22,0.62,12.3 +7.5,0.26,0.25,1.7,0.038,29.0,129.0,0.99312,3.45,0.56,10.4 +7.5,0.18,0.31,6.5,0.029,53.0,160.0,0.99276,3.03,0.38,10.9 +6.9,0.23,0.32,16.4,0.045,62.0,153.0,0.9972,3.22,0.42,10.5 +5.3,0.2,0.31,3.6,0.036,22.0,91.0,0.99278,3.41,0.5,9.8 +6.5,0.17,0.31,1.5,0.041,34.0,121.0,0.99092,3.06,0.46,10.5 +6.5,0.35,0.28,12.4,0.051,86.0,213.0,0.9962,3.16,0.51,9.9 +6.5,0.29,0.31,1.7,0.035,24.0,79.0,0.99053,3.27,0.69,11.4 +6.8,0.3,0.22,6.2,0.06,41.0,190.0,0.99858,3.18,0.51,9.2 +7.9,0.51,0.36,6.2,0.051,30.0,173.0,0.9984,3.09,0.53,9.7 +7.9,0.51,0.34,2.6,0.049,13.0,135.0,0.99335,3.09,0.51,10.0 +6.5,0.29,0.31,1.7,0.035,24.0,79.0,0.99053,3.27,0.69,11.4 +7.1,0.29,0.28,9.3,0.048,50.0,141.0,0.9949,3.13,0.49,10.3 +6.5,0.35,0.28,12.4,0.051,86.0,213.0,0.9962,3.16,0.51,9.9 +6.5,0.17,0.31,1.5,0.041,34.0,121.0,0.99092,3.06,0.46,10.5 +7.4,0.2,0.28,9.1,0.047,29.0,95.0,0.99532,3.16,0.47,9.8 +6.9,0.615,0.42,12.0,0.067,24.0,131.0,0.99727,3.19,0.34,9.3 +6.8,0.32,0.28,4.8,0.034,25.0,100.0,0.99026,3.08,0.47,12.4 +6.3,0.2,0.19,12.3,0.048,54.0,145.0,0.99668,3.16,0.42,9.3 +6.9,0.615,0.42,12.0,0.067,24.0,131.0,0.99727,3.19,0.34,9.3 +8.0,0.23,0.28,2.7,0.048,49.0,165.0,0.9952,3.26,0.72,9.5 +6.7,0.27,0.33,3.6,0.034,9.0,45.0,0.99144,3.08,0.4,10.5 +6.7,0.27,0.33,3.6,0.034,9.0,45.0,0.99144,3.08,0.4,10.5 +6.7,0.44,0.22,4.3,0.032,19.0,99.0,0.99015,3.26,0.53,12.8 +7.0,0.34,0.3,1.8,0.045,44.0,142.0,0.9914,2.99,0.45,10.8 +7.3,0.26,0.33,11.8,0.057,48.0,127.0,0.99693,3.1,0.55,10.0 +5.8,0.17,0.34,1.8,0.045,96.0,170.0,0.99035,3.38,0.9,11.8 +7.3,0.26,0.33,11.8,0.057,48.0,127.0,0.99693,3.1,0.55,10.0 +5.8,0.17,0.34,1.8,0.045,96.0,170.0,0.99035,3.38,0.9,11.8 +6.8,0.17,0.36,1.4,0.036,38.0,108.0,0.99006,3.19,0.66,12.0 +7.1,0.43,0.3,6.6,0.025,15.0,138.0,0.99126,3.18,0.46,12.6 +5.8,0.315,0.27,1.55,0.026,15.0,70.0,0.98994,3.37,0.4,11.9 
+5.9,0.17,0.28,0.7,0.027,5.0,28.0,0.98985,3.13,0.32,10.6 +6.6,0.34,0.18,6.4,0.082,47.0,240.0,0.9971,3.42,0.48,9.2 +8.6,0.33,0.34,11.8,0.059,42.0,240.0,0.99882,3.17,0.52,10.0 +5.6,0.12,0.26,4.3,0.038,18.0,97.0,0.99477,3.36,0.46,9.2 +5.8,0.13,0.26,5.1,0.039,19.0,103.0,0.99478,3.36,0.47,9.3 +7.7,0.18,0.35,5.8,0.055,25.0,144.0,0.99576,3.24,0.54,10.2 +7.7,0.16,0.36,5.9,0.054,25.0,148.0,0.99578,3.25,0.54,10.2 +6.0,0.26,0.15,1.3,0.06,51.0,154.0,0.99354,3.14,0.51,8.7 +7.3,0.32,0.35,1.4,0.05,8.0,163.0,0.99244,3.24,0.42,10.7 +7.7,0.3,0.34,1.2,0.048,4.0,119.0,0.99084,3.18,0.34,12.1 +7.9,0.16,0.3,7.4,0.05,58.0,152.0,0.99612,3.12,0.37,9.5 +6.4,0.27,0.29,10.8,0.028,17.0,118.0,0.99356,3.18,0.37,11.2 +6.9,0.16,0.37,1.8,0.034,36.0,95.0,0.98952,2.93,0.59,12.0 +7.9,0.16,0.3,7.4,0.05,58.0,152.0,0.99612,3.12,0.37,9.5 +7.7,0.3,0.34,1.2,0.048,4.0,119.0,0.99084,3.18,0.34,12.1 +7.3,0.32,0.35,1.4,0.05,8.0,163.0,0.99244,3.24,0.42,10.7 +6.4,0.44,0.44,14.4,0.048,29.0,228.0,0.99955,3.26,0.54,8.8 +6.3,0.2,0.24,1.7,0.052,36.0,135.0,0.99374,3.8,0.66,10.8 +6.2,0.29,0.32,3.6,0.026,39.0,138.0,0.9892,3.31,0.37,13.1 +7.6,0.39,0.32,3.6,0.035,22.0,93.0,0.99144,3.08,0.6,12.5 +7.0,0.36,0.32,10.05,0.045,37.0,131.0,0.99352,3.09,0.33,11.7 +7.0,0.36,0.32,10.05,0.045,37.0,131.0,0.99352,3.09,0.33,11.7 +7.0,0.36,0.32,10.5,0.045,35.0,135.0,0.9935,3.09,0.33,11.6 +7.6,0.2,0.36,1.9,0.043,24.0,111.0,0.99237,3.29,0.54,11.3 +7.6,0.39,0.32,3.6,0.035,22.0,93.0,0.99144,3.08,0.6,12.5 +6.7,0.2,0.37,1.65,0.025,42.0,103.0,0.99022,3.11,0.45,11.4 +6.2,0.235,0.34,1.9,0.036,4.0,117.0,0.99032,3.4,0.44,12.2 +7.8,0.965,0.6,65.8,0.074,8.0,160.0,1.03898,3.39,0.69,11.7 +7.1,0.2,0.31,6.85,0.053,32.0,211.0,0.99587,3.31,0.59,10.4 +7.1,0.2,0.31,7.4,0.053,32.0,211.0,0.99587,3.31,0.59,10.4 +7.1,0.2,0.31,7.4,0.053,32.0,211.0,0.99587,3.31,0.59,10.4 +6.4,0.24,0.25,20.2,0.083,35.0,157.0,0.99976,3.17,0.5,9.1 +8.0,0.3,0.36,11.0,0.034,8.0,70.0,0.99354,3.05,0.41,12.2 +6.4,0.24,0.25,20.2,0.083,35.0,157.0,0.99976,3.17,0.5,9.1 +6.9,0.4,0.42,6.2,0.066,41.0,176.0,0.99552,3.12,0.54,9.4 +6.9,0.4,0.43,6.2,0.065,42.0,178.0,0.99552,3.11,0.53,9.4 +7.1,0.2,0.31,6.85,0.053,32.0,211.0,0.99587,3.31,0.59,10.4 +6.6,0.25,0.51,8.0,0.047,61.0,189.0,0.99604,3.22,0.49,9.2 +6.8,0.26,0.44,8.2,0.046,52.0,183.0,0.99584,3.2,0.51,9.4 +6.5,0.37,0.3,2.2,0.033,39.0,107.0,0.98894,3.22,0.53,13.5 +6.8,0.35,0.53,10.1,0.053,37.0,151.0,0.9963,3.07,0.4,9.4 +6.4,0.22,0.32,7.2,0.028,15.0,83.0,0.993,3.13,0.55,10.9 +6.5,0.37,0.3,2.2,0.033,39.0,107.0,0.98894,3.22,0.53,13.5 +6.8,0.35,0.53,10.1,0.053,37.0,151.0,0.9963,3.07,0.4,9.4 +6.9,0.31,0.32,1.6,0.036,34.0,114.0,0.99068,3.19,0.45,11.4 +6.7,0.16,0.37,1.3,0.036,45.0,125.0,0.98964,3.19,0.51,12.4 +6.6,0.25,0.51,8.0,0.047,61.0,189.0,0.99604,3.22,0.49,9.2 +6.8,0.26,0.44,8.2,0.046,52.0,183.0,0.99584,3.2,0.51,9.4 +5.6,0.15,0.31,5.3,0.038,8.0,79.0,0.9923,3.3,0.39,10.5 +5.5,0.15,0.32,14.0,0.031,16.0,99.0,0.99437,3.26,0.38,11.5 +6.4,0.22,0.32,7.2,0.028,15.0,83.0,0.993,3.13,0.55,10.9 +7.3,0.2,0.26,1.6,0.04,36.0,123.0,0.99238,3.34,0.44,10.8 +7.5,0.17,0.71,11.8,0.038,52.0,148.0,0.99801,3.03,0.46,8.9 +7.5,0.18,0.72,9.6,0.039,53.0,151.0,0.99802,3.03,0.46,8.9 +7.0,0.27,0.48,6.1,0.042,60.0,184.0,0.99566,3.2,0.5,9.4 +5.8,0.32,0.31,2.7,0.049,25.0,153.0,0.99067,3.44,0.73,12.2 +7.8,0.26,0.31,3.6,0.025,22.0,100.0,0.99066,2.99,0.47,12.1 +7.4,0.3,0.32,1.7,0.03,23.0,128.0,0.9929,3.17,0.66,10.9 +6.7,0.16,0.34,1.6,0.026,27.0,109.0,0.9934,3.34,0.58,10.1 +5.8,0.32,0.31,2.7,0.049,25.0,153.0,0.99067,3.44,0.73,12.2 +6.7,0.19,0.39,1.0,0.032,14.0,71.0,0.98912,3.31,0.38,13.0 
+6.6,0.36,0.24,0.9,0.038,15.0,72.0,0.99066,3.23,0.39,11.0 +7.2,0.17,0.41,1.6,0.052,24.0,126.0,0.99228,3.19,0.49,10.8 +6.7,0.19,0.39,1.0,0.032,14.0,71.0,0.98912,3.31,0.38,13.0 +6.0,0.11,0.47,10.6,0.052,69.0,148.0,0.9958,2.91,0.34,9.3 +6.0,0.21,0.34,2.0,0.042,63.0,123.0,0.99052,3.44,0.42,11.4 +6.7,0.325,0.82,1.2,0.152,49.0,120.0,0.99312,2.99,0.38,9.2 +6.6,0.4,0.46,6.2,0.056,42.0,241.0,0.9968,3.5,0.6,9.9 +6.5,0.2,0.24,9.2,0.044,25.0,150.0,0.99502,3.22,0.44,10.5 +7.6,0.27,0.34,5.0,0.04,18.0,56.0,0.99084,3.06,0.48,12.4 +7.2,0.26,0.4,6.3,0.047,52.0,172.0,0.99573,3.18,0.53,9.5 +6.3,0.25,0.22,3.3,0.048,41.0,161.0,0.99256,3.16,0.5,10.5 +6.5,0.22,0.45,8.0,0.053,52.0,196.0,0.9959,3.23,0.48,9.1 +6.4,0.14,0.31,1.2,0.034,53.0,138.0,0.99084,3.38,0.35,11.5 +6.4,0.14,0.31,1.2,0.034,53.0,138.0,0.99084,3.38,0.35,11.5 +7.1,0.26,0.32,16.2,0.044,31.0,170.0,0.99644,3.17,0.37,11.2 +6.6,0.22,0.34,11.6,0.05,59.0,140.0,0.99526,3.22,0.4,10.8 +6.6,0.45,0.43,7.2,0.064,31.0,186.0,0.9954,3.12,0.44,9.4 +6.6,0.17,0.3,1.1,0.031,13.0,73.0,0.99095,3.17,0.58,11.0 +7.2,0.44,0.28,3.4,0.048,22.0,112.0,0.99188,3.21,0.37,11.3 +6.2,0.15,0.27,1.4,0.041,51.0,117.0,0.9909,3.28,0.38,11.2 +6.3,0.25,0.22,3.3,0.048,41.0,161.0,0.99256,3.16,0.5,10.5 +6.5,0.22,0.45,8.0,0.053,52.0,196.0,0.9959,3.23,0.48,9.1 +7.3,0.26,0.3,9.3,0.05,35.0,154.0,0.99581,3.21,0.5,10.4 +6.9,0.15,0.29,2.3,0.033,14.0,82.0,0.99132,3.1,0.58,11.2 +5.8,0.22,0.29,0.9,0.034,34.0,89.0,0.98936,3.14,0.36,11.1 +6.5,0.37,0.33,3.5,0.036,23.0,92.0,0.99136,3.18,0.38,11.2 +5.5,0.375,0.38,1.7,0.036,17.0,98.0,0.99142,3.29,0.39,10.5 +5.9,0.2,0.4,1.3,0.047,23.0,92.0,0.99232,3.2,0.45,10.0 +5.9,0.22,0.38,1.3,0.046,24.0,90.0,0.99232,3.2,0.47,10.0 +8.0,0.22,0.31,5.6,0.049,24.0,97.0,0.993,3.1,0.42,10.9 +6.5,0.22,0.29,7.4,0.028,16.0,87.0,0.99311,3.15,0.56,10.9 +6.9,0.15,0.29,2.3,0.033,14.0,82.0,0.99132,3.1,0.58,11.2 +5.8,0.2,0.34,1.0,0.035,40.0,86.0,0.98993,3.5,0.42,11.7 +6.6,0.31,0.07,1.5,0.033,55.0,144.0,0.99208,3.16,0.42,10.0 +7.7,0.43,0.37,10.0,0.169,22.0,210.0,0.99776,3.02,0.64,9.5 +6.7,0.24,0.29,14.9,0.053,55.0,136.0,0.99839,3.03,0.52,9.0 +7.3,0.23,0.34,9.3,0.052,19.0,86.0,0.99574,3.04,0.56,10.0 +7.9,0.2,0.39,1.0,0.041,37.0,154.0,0.99093,3.08,0.43,11.9 +5.3,0.16,0.39,1.0,0.028,40.0,101.0,0.99156,3.57,0.59,10.6 +6.4,0.21,0.28,5.9,0.047,29.0,101.0,0.99278,3.15,0.4,11.0 +6.9,0.33,0.26,5.0,0.027,46.0,143.0,0.9924,3.25,0.43,11.2 +5.6,0.18,0.58,1.25,0.034,29.0,129.0,0.98984,3.51,0.6,12.0 +6.6,0.29,0.31,3.9,0.027,39.0,96.0,0.99035,3.24,0.6,12.6 +6.9,0.33,0.26,5.0,0.027,46.0,143.0,0.9924,3.25,0.43,11.2 +6.6,0.21,0.36,0.8,0.034,48.0,113.0,0.99165,3.24,0.68,10.5 +7.3,0.21,0.33,1.0,0.037,66.0,144.0,0.9923,3.11,0.52,10.2 +6.4,0.21,0.28,5.9,0.047,29.0,101.0,0.99278,3.15,0.4,11.0 +5.1,0.11,0.32,1.6,0.028,12.0,90.0,0.99008,3.57,0.52,12.2 +6.5,0.15,0.32,1.3,0.036,19.0,76.0,0.98964,3.18,0.41,12.3 +5.3,0.16,0.39,1.0,0.028,40.0,101.0,0.99156,3.57,0.59,10.6 +5.6,0.19,0.46,1.1,0.032,33.0,115.0,0.9909,3.36,0.5,10.4 +5.6,0.18,0.58,1.25,0.034,29.0,129.0,0.98984,3.51,0.6,12.0 +6.7,0.48,0.32,1.4,0.021,22.0,121.0,0.9889,3.15,0.53,12.7 +6.2,0.23,0.23,1.2,0.018,18.0,128.0,0.99178,3.05,0.28,10.6 +6.0,0.17,0.29,5.0,0.028,25.0,108.0,0.99076,3.14,0.34,12.3 +6.7,0.48,0.32,1.4,0.021,22.0,121.0,0.9889,3.15,0.53,12.7 +6.7,0.15,0.38,1.7,0.037,20.0,84.0,0.99046,3.09,0.53,11.4 +4.2,0.17,0.36,1.8,0.029,93.0,161.0,0.98999,3.65,0.89,12.0 +5.8,0.21,0.32,1.6,0.045,38.0,95.0,0.98946,3.23,0.94,12.4 +5.4,0.23,0.36,1.5,0.03,74.0,121.0,0.98976,3.24,0.99,12.1 +6.7,0.15,0.38,1.7,0.037,20.0,84.0,0.99046,3.09,0.53,11.4 
+6.4,0.22,0.31,13.9,0.04,57.0,135.0,0.99672,3.21,0.38,10.7 +6.5,0.15,0.55,5.9,0.045,75.0,162.0,0.99482,2.97,0.4,9.3 +5.9,0.32,0.33,2.1,0.027,35.0,138.0,0.98945,3.37,0.42,12.7 +5.7,0.37,0.3,1.1,0.029,24.0,88.0,0.98883,3.18,0.39,11.7 +7.9,0.25,0.35,6.7,0.039,22.0,64.0,0.99362,2.93,0.49,10.7 +7.2,0.21,0.28,2.7,0.033,38.0,94.0,0.99075,2.99,0.43,11.8 +7.0,0.24,0.3,6.7,0.039,37.0,125.0,0.99436,3.2,0.39,9.9 +6.8,0.475,0.33,3.95,0.047,16.0,81.0,0.98988,3.23,0.53,13.4 +7.0,0.28,0.32,7.75,0.032,30.0,114.0,0.99158,3.12,0.64,12.8 +6.9,0.4,0.3,10.6,0.033,24.0,87.0,0.99265,3.15,0.45,12.8 +6.6,0.41,0.31,1.6,0.042,18.0,101.0,0.99195,3.13,0.41,10.5 +6.4,0.2,0.28,2.5,0.032,24.0,84.0,0.99168,3.31,0.55,11.5 +8.5,0.22,0.34,0.7,0.04,5.0,25.0,0.9918,3.04,0.37,10.5 +8.4,0.36,0.36,11.1,0.032,21.0,132.0,0.99313,2.95,0.39,13.0 +5.2,0.285,0.29,5.15,0.035,64.0,138.0,0.9895,3.19,0.34,12.4 +6.9,0.2,0.3,4.7,0.041,40.0,148.0,0.9932,3.16,0.35,10.2 +6.7,0.42,0.46,9.7,0.054,67.0,234.0,0.99848,3.23,0.5,9.0 +6.2,0.16,0.34,1.7,0.038,85.0,153.0,0.9909,3.33,0.86,12.0 +6.4,0.125,0.36,1.4,0.044,22.0,68.0,0.99014,3.15,0.5,11.7 +6.4,0.44,0.26,2.0,0.054,20.0,180.0,0.9952,3.58,0.57,10.0 +7.0,0.31,0.39,7.5,0.055,42.0,218.0,0.99652,3.37,0.54,10.3 +6.7,0.42,0.46,9.7,0.054,67.0,234.0,0.99848,3.23,0.5,9.0 +8.6,0.18,0.28,0.8,0.032,25.0,78.0,0.99104,2.99,0.38,11.1 +6.2,0.21,0.26,13.1,0.05,59.0,150.0,0.99772,3.31,0.46,9.0 +6.1,0.16,0.37,1.1,0.031,37.0,97.0,0.9922,3.4,0.58,10.5 +6.5,0.22,0.32,2.2,0.028,36.0,92.0,0.99076,3.27,0.59,11.9 +6.2,0.36,0.14,8.9,0.036,38.0,155.0,0.99622,3.27,0.5,9.4 +5.7,0.21,0.25,1.1,0.035,26.0,81.0,0.9902,3.31,0.52,11.4 +6.4,0.25,0.32,0.9,0.034,40.0,114.0,0.99114,3.31,0.58,10.8 +7.6,0.31,0.26,1.7,0.073,40.0,157.0,0.9938,3.1,0.46,9.8 +6.6,0.26,0.46,6.9,0.047,59.0,183.0,0.99594,3.2,0.45,9.3 +5.7,0.21,0.25,1.1,0.035,26.0,81.0,0.9902,3.31,0.52,11.4 +6.2,0.2,0.31,1.0,0.031,22.0,73.0,0.99035,3.24,0.52,11.3 +6.2,0.18,0.3,1.0,0.031,23.0,73.0,0.99032,3.23,0.52,11.3 +6.1,0.37,0.2,7.6,0.031,49.0,170.0,0.99558,3.22,0.48,9.5 +6.2,0.36,0.14,8.9,0.036,38.0,155.0,0.99622,3.27,0.5,9.4 +6.5,0.22,0.32,2.2,0.028,36.0,92.0,0.99076,3.27,0.59,11.9 +7.7,0.18,0.3,1.2,0.046,49.0,199.0,0.99413,3.03,0.38,9.3 +6.9,0.14,0.38,1.0,0.041,22.0,81.0,0.99043,3.03,0.54,11.4 +6.9,0.14,0.38,1.0,0.041,22.0,81.0,0.99043,3.03,0.54,11.4 +6.0,0.44,0.26,3.1,0.053,57.0,128.0,0.98982,3.22,0.39,12.7 +7.1,0.36,0.4,1.95,0.033,26.0,118.0,0.98934,3.2,0.45,13.5 +5.7,0.28,0.28,2.2,0.019,15.0,65.0,0.9902,3.06,0.52,11.2 +6.4,0.16,0.32,8.75,0.038,38.0,118.0,0.99449,3.19,0.41,10.7 +7.4,0.28,0.4,11.9,0.032,13.0,92.0,0.99629,3.01,0.46,10.8 +6.7,0.39,0.31,2.7,0.054,27.0,202.0,0.9948,3.46,0.57,10.5 +6.5,0.44,0.47,5.45,0.014,44.0,137.0,0.98984,3.13,0.32,13.0 +6.9,0.22,0.31,6.3,0.029,41.0,131.0,0.99326,3.08,0.49,10.8 +6.6,0.22,0.29,14.4,0.046,39.0,118.0,0.99834,3.05,0.5,9.1 +7.7,0.25,0.3,7.8,0.038,67.0,196.0,0.99555,3.1,0.5,10.1 +5.2,0.155,0.33,1.6,0.028,13.0,59.0,0.98975,3.3,0.84,11.9 +7.0,0.31,0.31,9.1,0.036,45.0,140.0,0.99216,2.98,0.31,12.0 +7.0,0.31,0.31,9.1,0.036,45.0,140.0,0.99216,2.98,0.31,12.0 +6.6,0.22,0.29,14.4,0.046,39.0,118.0,0.99834,3.05,0.5,9.1 +5.6,0.21,0.4,1.3,0.041,81.0,147.0,0.9901,3.22,0.95,11.6 +5.2,0.155,0.33,1.6,0.028,13.0,59.0,0.98975,3.3,0.84,11.9 +6.4,0.25,0.32,11.3,0.038,69.0,192.0,0.99573,3.14,0.5,10.2 +6.9,0.22,0.31,6.3,0.029,41.0,131.0,0.99326,3.08,0.49,10.8 +5.3,0.21,0.29,0.7,0.028,11.0,66.0,0.99215,3.3,0.4,9.8 +7.1,0.27,0.28,1.25,0.023,3.0,89.0,0.98993,2.95,0.3,11.4 +5.2,0.17,0.27,0.7,0.03,11.0,68.0,0.99218,3.3,0.41,9.8 
+7.7,0.25,0.3,7.8,0.038,67.0,196.0,0.99555,3.1,0.5,10.1 +7.0,0.12,0.29,10.3,0.039,41.0,98.0,0.99564,3.19,0.38,9.8 +7.0,0.12,0.29,10.3,0.039,41.0,98.0,0.99564,3.19,0.38,9.8 +7.1,0.29,0.34,7.8,0.036,49.0,128.0,0.99397,3.21,0.4,10.7 +7.2,0.3,0.3,8.7,0.022,14.0,111.0,0.99576,3.11,0.61,10.6 +6.8,0.26,0.46,8.3,0.037,49.0,173.0,0.99601,3.17,0.47,9.3 +7.0,0.12,0.29,10.3,0.039,41.0,98.0,0.99564,3.19,0.38,9.8 +7.1,0.29,0.34,7.8,0.036,49.0,128.0,0.99397,3.21,0.4,10.7 +4.9,0.33,0.31,1.2,0.016,39.0,150.0,0.98713,3.33,0.59,14.0 +5.1,0.29,0.28,8.3,0.026,27.0,107.0,0.99308,3.36,0.37,11.0 +5.1,0.29,0.28,8.3,0.026,27.0,107.0,0.99308,3.36,0.37,11.0 +6.8,0.26,0.48,6.2,0.049,55.0,182.0,0.99582,3.21,0.45,9.4 +6.0,0.28,0.52,5.0,0.078,30.0,139.0,0.99494,3.1,0.36,9.0 +6.0,0.28,0.25,1.8,0.042,8.0,108.0,0.9929,3.08,0.55,9.0 +7.2,0.2,0.22,1.6,0.044,17.0,101.0,0.99471,3.37,0.53,10.0 +6.1,0.27,0.25,1.8,0.041,9.0,109.0,0.9929,3.08,0.54,9.0 +6.0,0.28,0.25,1.8,0.042,8.0,108.0,0.9929,3.08,0.55,9.0 +6.4,0.29,0.3,2.9,0.036,25.0,79.0,0.99037,3.29,0.6,12.4 +7.4,0.35,0.24,6.0,0.042,28.0,123.0,0.99304,3.14,0.44,11.3 +8.1,0.12,0.38,0.9,0.034,36.0,86.0,0.99026,2.8,0.55,12.0 +6.4,0.12,0.3,1.1,0.031,37.0,94.0,0.98986,3.01,0.56,11.7 +7.2,0.2,0.22,1.6,0.044,17.0,101.0,0.99471,3.37,0.53,10.0 +7.3,0.4,0.26,5.45,0.016,26.0,90.0,0.98951,2.84,0.54,13.2 +7.7,0.11,0.34,14.05,0.04,41.0,114.0,0.99634,3.07,0.59,11.0 +6.9,0.23,0.41,8.0,0.03,30.0,114.0,0.99368,3.22,0.54,11.0 +6.9,0.38,0.38,13.1,0.112,14.0,94.0,0.99792,3.02,0.48,9.2 +7.5,0.38,0.29,4.9,0.021,38.0,113.0,0.99026,3.08,0.48,13.0 +5.8,0.19,0.24,1.3,0.044,38.0,128.0,0.99362,3.77,0.6,10.6 +5.5,0.34,0.26,2.2,0.021,31.0,119.0,0.98919,3.55,0.49,13.0 +6.6,0.23,0.3,14.9,0.051,33.0,118.0,0.99835,3.04,0.54,9.0 +6.6,0.23,0.3,14.9,0.051,33.0,118.0,0.99835,3.04,0.54,9.0 +8.4,0.31,0.31,0.95,0.021,52.0,148.0,0.99038,2.93,0.32,11.5 +6.7,0.2,0.3,1.4,0.025,17.0,76.0,0.99104,3.11,0.44,11.0 +8.4,0.31,0.31,0.95,0.021,52.0,148.0,0.99038,2.93,0.32,11.5 +7.3,0.26,0.24,1.7,0.05,10.0,112.0,0.99286,3.11,0.43,9.9 +6.3,0.22,0.22,5.6,0.039,31.0,128.0,0.99296,3.12,0.46,10.4 +6.6,0.23,0.3,14.9,0.051,33.0,118.0,0.99835,3.04,0.54,9.0 +7.5,0.19,0.4,7.1,0.056,50.0,110.0,0.9954,3.06,0.52,9.9 +8.0,0.14,0.33,1.2,0.045,71.0,162.0,0.9914,3.07,0.47,11.0 +6.8,0.32,0.39,9.6,0.026,34.0,124.0,0.99286,3.18,0.35,12.1 +6.6,0.23,0.2,11.4,0.044,45.0,131.0,0.99604,2.96,0.51,9.7 +6.6,0.23,0.2,11.4,0.044,45.0,131.0,0.99604,2.96,0.51,9.7 +6.7,0.36,0.26,7.9,0.034,39.0,123.0,0.99119,2.99,0.3,12.2 +6.1,0.38,0.42,5.0,0.016,31.0,113.0,0.99007,3.15,0.31,12.4 +8.5,0.23,0.28,11.1,0.033,30.0,97.0,0.99507,3.03,0.39,10.5 +7.0,0.2,0.31,8.0,0.05,29.0,213.0,0.99596,3.28,0.57,10.4 +6.0,0.26,0.32,3.8,0.029,48.0,180.0,0.99011,3.15,0.34,12.0 +6.9,0.3,0.3,10.55,0.037,4.0,28.0,0.99184,3.07,0.32,12.7 +6.7,0.18,0.28,10.2,0.039,29.0,115.0,0.99469,3.11,0.45,10.9 +6.7,0.18,0.28,10.2,0.039,29.0,115.0,0.99469,3.11,0.45,10.9 +6.8,0.18,0.28,9.8,0.039,29.0,113.0,0.99406,3.11,0.45,10.9 +7.2,0.19,0.31,6.3,0.034,17.0,103.0,0.99305,3.15,0.52,11.4 +6.2,0.16,0.32,1.1,0.036,74.0,184.0,0.99096,3.22,0.41,11.0 +5.0,0.27,0.32,4.5,0.032,58.0,178.0,0.98956,3.45,0.31,12.6 +6.3,0.37,0.28,6.3,0.034,45.0,152.0,0.9921,3.29,0.46,11.6 +6.6,0.2,0.27,10.9,0.038,29.0,130.0,0.99496,3.11,0.44,10.5 +6.8,0.18,0.28,9.8,0.039,29.0,113.0,0.99406,3.11,0.45,10.9 +6.8,0.18,0.28,9.8,0.039,29.0,113.0,0.99406,3.11,0.45,10.9 +6.6,0.28,0.34,0.8,0.037,42.0,119.0,0.9888,3.03,0.37,12.5 +6.5,0.35,0.36,0.8,0.034,32.0,111.0,0.98942,3.11,0.5,12.1 +6.9,0.25,0.33,1.2,0.035,35.0,158.0,0.99082,3.02,0.58,11.3 
+6.0,0.32,0.3,1.3,0.025,18.0,112.0,0.98802,3.07,0.64,13.3 +6.8,0.18,0.28,9.8,0.039,29.0,113.0,0.99406,3.11,0.45,10.9 +6.7,0.18,0.28,10.2,0.039,29.0,115.0,0.99469,3.11,0.45,10.9 +6.6,0.2,0.27,10.9,0.038,29.0,130.0,0.99496,3.11,0.44,10.5 +6.3,0.37,0.28,6.3,0.034,45.0,152.0,0.9921,3.29,0.46,11.6 +7.2,0.19,0.31,6.3,0.034,17.0,103.0,0.99305,3.15,0.52,11.4 +6.3,0.18,0.36,1.2,0.034,26.0,111.0,0.99074,3.16,0.51,11.0 +6.9,0.3,0.36,0.9,0.037,40.0,156.0,0.98968,3.08,0.36,12.1 +6.2,0.16,0.32,1.1,0.036,74.0,184.0,0.99096,3.22,0.41,11.0 +5.0,0.27,0.32,4.5,0.032,58.0,178.0,0.98956,3.45,0.31,12.6 +5.0,0.3,0.33,3.7,0.03,54.0,173.0,0.9887,3.36,0.3,13.0 +6.5,0.2,0.5,18.1,0.054,50.0,221.0,0.99941,2.94,0.64,8.8 +6.7,0.25,0.31,1.35,0.061,30.5,218.0,0.99388,3.16,0.53,9.5 +6.6,0.22,0.36,5.5,0.029,30.0,105.0,0.99206,3.2,0.47,11.8 +6.8,0.25,0.37,3.1,0.026,29.0,93.0,0.99035,3.14,0.45,12.2 +7.0,0.13,0.37,12.85,0.042,36.0,105.0,0.99581,3.05,0.55,10.7 +7.0,0.45,0.34,19.8,0.04,12.0,67.0,0.9976,3.07,0.38,11.0 +7.2,0.32,0.3,8.25,0.02,14.0,104.0,0.99362,2.99,0.44,11.4 +7.0,0.13,0.37,12.85,0.042,36.0,105.0,0.99581,3.05,0.55,10.7 +5.9,0.34,0.3,3.8,0.035,57.0,135.0,0.99016,3.09,0.34,12.0 +6.8,0.22,0.31,6.9,0.037,33.0,121.0,0.99176,3.02,0.39,11.9 +7.2,0.32,0.3,8.25,0.02,14.0,104.0,0.99362,2.99,0.44,11.4 +8.4,0.32,0.35,11.7,0.029,3.0,46.0,0.99439,3.02,0.34,11.8 +6.8,0.27,0.29,4.6,0.046,6.0,88.0,0.99458,3.34,0.48,10.6 +8.0,0.74,0.21,4.0,0.05,24.0,133.0,0.99418,3.06,0.38,9.7 +7.0,0.45,0.34,19.8,0.04,12.0,67.0,0.9976,3.07,0.38,11.0 +7.0,0.13,0.37,12.85,0.042,36.0,105.0,0.99581,3.05,0.55,10.7 +5.4,0.22,0.29,1.2,0.045,69.0,152.0,0.99178,3.76,0.63,11.0 +8.4,0.22,0.3,8.9,0.024,17.0,118.0,0.99456,2.99,0.34,10.5 +7.4,0.32,0.22,11.7,0.035,44.0,150.0,0.99578,3.1,0.45,10.4 +7.5,0.18,0.37,6.2,0.05,21.0,138.0,0.99546,3.2,0.55,10.5 +7.1,0.47,0.29,14.8,0.024,22.0,142.0,0.99518,3.12,0.48,12.0 +7.1,0.47,0.29,14.8,0.024,22.0,142.0,0.99518,3.12,0.48,12.0 +5.8,0.19,0.25,10.8,0.042,33.0,124.0,0.99646,3.22,0.41,9.2 +6.7,0.14,0.46,1.6,0.036,15.0,92.0,0.99264,3.37,0.49,10.9 +6.8,0.24,0.38,8.3,0.045,50.0,185.0,0.99578,3.15,0.5,9.5 +6.9,0.25,0.47,8.4,0.042,36.0,156.0,0.99604,3.15,0.55,9.4 +6.0,0.24,0.33,2.5,0.026,31.0,85.0,0.99014,3.13,0.5,11.3 +6.8,0.29,0.34,3.5,0.054,26.0,189.0,0.99489,3.42,0.58,10.4 +6.3,0.33,0.42,17.2,0.037,57.0,170.0,0.99884,3.26,0.57,9.4 +6.5,0.23,0.45,2.1,0.027,43.0,104.0,0.99054,3.02,0.52,11.3 +6.3,0.27,0.29,12.2,0.044,59.0,196.0,0.99782,3.14,0.4,8.8 +6.3,0.2,0.37,11.8,0.045,58.0,130.0,0.99519,3.2,0.35,10.8 +6.2,0.33,0.41,16.8,0.037,58.0,173.0,0.99882,3.25,0.57,9.4 +6.3,0.33,0.42,17.2,0.037,57.0,170.0,0.99884,3.26,0.57,9.4 +7.2,0.21,1.0,1.1,0.154,46.0,114.0,0.9931,2.95,0.43,9.2 +6.0,0.27,0.3,14.7,0.044,15.0,144.0,0.99666,3.12,0.53,10.3 +5.7,0.12,0.26,5.5,0.034,21.0,99.0,0.99324,3.09,0.57,9.9 +6.9,0.24,0.37,6.1,0.027,38.0,112.0,0.99086,3.19,0.34,12.4 +7.7,0.18,0.53,1.2,0.041,42.0,167.0,0.9908,3.11,0.44,11.9 +7.1,0.17,0.43,1.3,0.023,33.0,132.0,0.99067,3.11,0.56,11.7 +7.5,0.33,0.38,8.7,0.126,49.0,199.0,0.99711,2.98,0.57,9.4 +6.2,0.255,0.24,1.7,0.039,138.5,272.0,0.99452,3.53,0.53,9.6 +7.5,0.33,0.38,8.7,0.126,49.0,199.0,0.99711,2.98,0.57,9.4 +5.6,0.2,0.66,10.2,0.043,78.0,175.0,0.9945,2.98,0.43,10.4 +7.6,0.17,0.36,4.5,0.042,26.0,102.0,0.99427,3.09,0.47,9.5 +5.8,0.15,0.31,5.9,0.036,7.0,73.0,0.99152,3.2,0.43,11.9 +6.3,0.25,0.44,1.7,0.024,36.0,116.0,0.98935,3.18,0.4,12.5 +6.9,0.28,0.41,1.4,0.016,6.0,55.0,0.98876,3.16,0.4,13.4 +7.2,0.27,0.37,5.4,0.026,27.0,114.0,0.99174,3.13,0.84,12.7 
+6.2,0.25,0.38,7.9,0.045,54.0,208.0,0.99572,3.17,0.46,9.1 +8.5,0.19,0.48,1.1,0.026,23.0,58.0,0.99184,2.9,0.5,10.5 +6.2,0.25,0.54,7.0,0.046,58.0,176.0,0.99454,3.19,0.7,10.4 +6.2,0.25,0.54,7.0,0.046,58.0,176.0,0.99454,3.19,0.7,10.4 +6.8,0.28,0.43,7.6,0.03,30.0,110.0,0.99164,3.08,0.59,12.5 +6.2,0.25,0.54,7.0,0.046,58.0,176.0,0.99454,3.19,0.7,10.4 +7.4,0.21,0.8,12.3,0.038,77.0,183.0,0.99778,2.95,0.48,9.0 +7.0,0.15,0.38,15.3,0.045,54.0,120.0,0.9975,3.18,0.42,9.8 +7.4,0.21,0.8,12.3,0.038,77.0,183.0,0.99778,2.95,0.48,9.0 +7.3,0.28,0.42,1.2,0.033,29.0,142.0,0.99205,3.17,0.43,10.7 +6.1,0.18,0.38,2.3,0.033,28.0,111.0,0.98962,3.16,0.49,12.4 +7.0,0.53,0.43,6.1,0.029,6.0,76.0,0.99118,3.08,0.5,12.5 +6.8,0.28,0.43,7.6,0.03,30.0,110.0,0.99164,3.08,0.59,12.5 +6.5,0.36,0.38,10.2,0.028,20.0,82.0,0.99274,3.1,0.43,12.1 +7.5,0.25,0.47,4.1,0.041,95.0,163.0,0.99184,2.92,0.59,11.3 +6.7,0.24,0.41,2.9,0.039,48.0,122.0,0.99052,3.25,0.43,12.0 +6.6,0.25,0.33,8.5,0.042,29.0,141.0,0.99546,3.28,0.6,10.4 +6.4,0.15,0.4,1.5,0.042,23.0,87.0,0.98972,3.11,0.46,12.2 +6.3,0.28,0.3,3.1,0.039,24.0,115.0,0.9942,3.05,0.43,8.6 +6.2,0.25,0.38,7.9,0.045,54.0,208.0,0.99572,3.17,0.46,9.1 +7.1,0.28,0.35,3.5,0.028,35.0,91.0,0.99022,2.96,0.33,12.1 +6.6,0.35,0.34,4.9,0.032,9.0,125.0,0.99253,3.32,0.81,12.0 +8.5,0.19,0.48,1.1,0.026,23.0,58.0,0.99184,2.9,0.5,10.5 +6.2,0.25,0.54,7.0,0.046,58.0,176.0,0.99454,3.19,0.7,10.4 +6.0,0.35,0.51,1.2,0.029,10.0,102.0,0.9903,3.46,0.42,11.9 +5.8,0.31,0.32,4.5,0.024,28.0,94.0,0.98906,3.25,0.52,13.7 +6.6,0.17,0.35,2.6,0.03,33.0,78.0,0.99146,3.22,0.72,11.3 +8.5,0.23,0.4,9.9,0.036,24.0,88.0,0.9951,3.02,0.42,10.5 +5.8,0.31,0.32,4.5,0.024,28.0,94.0,0.98906,3.25,0.52,13.7 +6.1,0.2,0.34,9.5,0.041,38.0,201.0,0.995,3.14,0.44,10.1 +6.3,0.37,0.37,1.5,0.024,12.0,76.0,0.98876,2.94,0.39,12.3 +6.2,0.36,0.38,3.2,0.031,20.0,89.0,0.98956,3.06,0.33,12.0 +6.6,0.17,0.35,2.6,0.03,33.0,78.0,0.99146,3.22,0.72,11.3 +6.3,0.28,0.47,11.2,0.04,61.0,183.0,0.99592,3.12,0.51,9.5 +7.6,0.27,0.52,3.2,0.043,28.0,152.0,0.99129,3.02,0.53,11.4 +7.0,0.25,0.45,2.3,0.045,40.0,118.0,0.99064,3.16,0.48,11.9 +9.7,0.24,0.49,4.9,0.032,3.0,18.0,0.99368,2.85,0.54,10.0 +9.7,0.24,0.49,4.9,0.032,3.0,18.0,0.99368,2.85,0.54,10.0 +6.8,0.13,0.39,1.4,0.034,19.0,102.0,0.99121,3.23,0.6,11.3 +6.6,0.78,0.5,1.5,0.045,30.0,133.0,0.99104,3.25,0.48,11.7 +5.1,0.33,0.27,6.7,0.022,44.0,129.0,0.99221,3.36,0.39,11.0 +6.7,0.34,0.4,2.1,0.033,34.0,111.0,0.98924,2.97,0.48,12.2 +6.7,0.14,0.51,4.3,0.028,57.0,124.0,0.99176,2.91,0.54,10.7 +7.0,0.26,0.34,10.9,0.038,25.0,84.0,0.99432,3.11,0.34,10.9 +6.5,0.29,0.26,7.0,0.04,18.0,113.0,0.99366,3.17,0.38,10.2 +7.0,0.25,0.45,2.3,0.045,40.0,118.0,0.99064,3.16,0.48,11.9 +7.6,0.21,0.49,2.5,0.047,20.0,130.0,0.99178,3.15,0.48,11.1 +7.7,0.26,0.51,2.6,0.045,26.0,159.0,0.99126,3.0,0.5,11.2 +7.6,0.27,0.52,3.2,0.043,28.0,152.0,0.99129,3.02,0.53,11.4 +7.7,0.25,0.49,2.5,0.047,31.0,169.0,0.99252,3.07,0.57,10.6 +7.6,0.35,0.46,14.7,0.047,33.0,151.0,0.99709,3.03,0.53,10.3 +6.9,0.3,0.36,4.5,0.054,31.0,203.0,0.99513,3.4,0.57,10.4 +6.7,0.24,0.46,2.2,0.033,19.0,111.0,0.99045,3.1,0.62,11.9 +6.5,0.23,0.39,1.9,0.036,41.0,98.0,0.99,3.19,0.43,11.9 +7.6,0.23,0.34,1.6,0.043,24.0,129.0,0.99305,3.12,0.7,10.4 +6.5,0.24,0.39,17.3,0.052,22.0,126.0,0.99888,3.11,0.47,9.2 +6.3,0.17,0.32,4.2,0.04,37.0,117.0,0.99182,3.24,0.43,11.3 +6.3,0.17,0.32,4.2,0.04,37.0,117.0,0.99182,3.24,0.43,11.3 +6.7,0.21,0.37,2.5,0.034,35.0,89.0,0.9913,3.25,0.5,11.0 +6.5,0.23,0.39,1.9,0.036,41.0,98.0,0.99,3.19,0.43,11.9 +5.9,0.28,0.39,1.4,0.031,47.0,147.0,0.98836,3.08,0.64,12.9 
+5.9,0.19,0.37,0.8,0.027,3.0,21.0,0.9897,3.09,0.31,10.8 +6.2,0.25,0.42,8.0,0.049,53.0,206.0,0.99586,3.16,0.47,9.1 +7.6,0.23,0.34,1.6,0.043,24.0,129.0,0.99305,3.12,0.7,10.4 +5.6,0.18,0.27,1.7,0.03,31.0,103.0,0.98892,3.35,0.37,12.9 +5.5,0.18,0.22,5.5,0.037,10.0,86.0,0.99156,3.46,0.44,12.2 +6.5,0.24,0.39,17.3,0.052,22.0,126.0,0.99888,3.11,0.47,9.2 +7.4,0.23,0.38,8.6,0.052,41.0,150.0,0.99534,3.06,0.46,10.3 +7.2,0.17,0.37,6.9,0.059,47.0,128.0,0.99322,3.08,0.46,11.0 +7.6,0.3,0.38,2.1,0.043,10.0,98.0,0.99296,3.17,0.65,11.0 +5.0,0.24,0.21,2.2,0.039,31.0,100.0,0.99098,3.69,0.62,11.7 +6.1,0.21,0.38,1.5,0.039,37.0,122.0,0.98972,3.2,0.43,12.0 +6.5,0.33,0.38,2.5,0.047,30.0,148.0,0.98964,3.17,0.43,12.7 +6.3,0.35,0.26,17.6,0.061,59.0,198.0,0.99918,3.11,0.49,8.8 +6.3,0.17,0.32,4.2,0.04,37.0,117.0,0.99182,3.24,0.43,11.3 +6.6,0.25,0.35,2.9,0.034,38.0,121.0,0.99008,3.19,0.4,12.8 +6.5,0.16,0.33,4.8,0.043,45.0,114.0,0.992,3.18,0.44,11.2 +6.6,0.39,0.39,11.9,0.057,51.0,221.0,0.99851,3.26,0.51,8.9 +5.6,0.19,0.27,0.9,0.04,52.0,103.0,0.99026,3.5,0.39,11.2 +6.2,0.25,0.39,1.3,0.051,42.0,135.0,0.9906,3.23,0.4,11.1 +6.9,0.22,0.43,6.4,0.042,34.0,115.0,0.99293,3.05,0.51,10.8 +6.2,0.19,0.29,4.3,0.045,33.0,126.0,0.99658,3.18,0.42,9.3 +6.6,0.39,0.39,11.9,0.057,51.0,221.0,0.99851,3.26,0.51,8.9 +5.9,0.33,0.32,8.1,0.038,9.0,34.0,0.9911,3.22,0.36,12.7 +7.8,0.17,0.5,1.3,0.045,35.0,140.0,0.9904,3.16,0.4,12.0 +5.5,0.19,0.27,0.9,0.04,52.0,103.0,0.99026,3.5,0.39,11.2 +6.2,0.23,0.36,17.2,0.039,37.0,130.0,0.99946,3.23,0.43,8.8 +6.2,0.23,0.36,17.2,0.039,37.0,130.0,0.99946,3.23,0.43,8.8 +6.2,0.23,0.36,17.2,0.039,37.0,130.0,0.99946,3.23,0.43,8.8 +7.2,0.32,0.4,8.7,0.038,45.0,154.0,0.99568,3.2,0.47,10.4 +6.2,0.23,0.36,17.2,0.039,37.0,130.0,0.99946,3.23,0.43,8.8 +7.2,0.32,0.4,8.7,0.038,45.0,154.0,0.99568,3.2,0.47,10.4 +5.8,0.39,0.47,7.5,0.027,12.0,88.0,0.9907,3.38,0.45,14.0 +6.2,0.23,0.36,17.2,0.039,37.0,130.0,0.99946,3.23,0.43,8.8 +7.6,0.25,1.23,4.6,0.035,51.0,294.0,0.99018,3.03,0.43,13.1 +5.8,0.29,0.33,3.7,0.029,30.0,88.0,0.98994,3.25,0.42,12.3 +7.2,0.4,0.38,2.2,0.03,40.0,109.0,0.99075,3.27,0.46,12.6 +6.8,0.39,0.34,7.4,0.02,38.0,133.0,0.99212,3.18,0.44,12.0 +6.1,0.17,0.42,15.1,0.033,28.0,124.0,0.99684,2.87,0.47,9.5 +6.8,0.39,0.34,7.4,0.02,38.0,133.0,0.99212,3.18,0.44,12.0 +7.1,0.36,0.37,4.8,0.019,39.0,114.0,0.99036,3.08,0.49,12.7 +6.9,0.19,0.32,7.9,0.042,30.0,130.0,0.99456,3.4,0.39,10.5 +6.5,0.34,0.46,1.0,0.023,6.0,80.0,0.98865,3.15,0.54,12.9 +6.1,0.17,0.42,15.1,0.033,28.0,124.0,0.99684,2.87,0.47,9.5 +6.8,0.39,0.34,7.4,0.02,38.0,133.0,0.99212,3.18,0.44,12.0 +7.1,0.36,0.37,4.8,0.019,39.0,114.0,0.99036,3.08,0.49,12.7 +7.8,0.3,0.36,4.6,0.024,20.0,198.0,0.99222,3.06,0.66,11.9 +6.1,0.68,0.52,1.4,0.037,32.0,123.0,0.99022,3.24,0.45,12.0 +5.2,0.34,0.37,6.2,0.031,42.0,133.0,0.99076,3.25,0.41,12.5 +5.6,0.28,0.4,6.1,0.034,36.0,118.0,0.99144,3.21,0.43,12.1 +6.2,0.19,0.38,5.1,0.019,22.0,82.0,0.98961,3.05,0.36,12.5 +5.7,0.16,0.26,6.3,0.043,28.0,113.0,0.9936,3.06,0.58,9.9 +7.6,0.17,0.46,0.9,0.036,63.0,147.0,0.99126,3.02,0.41,10.7 +7.3,0.2,0.39,2.3,0.048,24.0,87.0,0.99044,2.94,0.35,12.0 +6.7,0.33,0.36,6.6,0.042,34.0,116.0,0.99123,2.97,0.31,12.2 +6.7,0.33,0.34,7.5,0.036,39.0,124.0,0.99123,2.99,0.32,12.4 +6.9,0.36,0.35,8.6,0.038,37.0,125.0,0.9916,3.0,0.32,12.4 +7.8,0.21,0.34,11.9,0.039,55.0,140.0,0.9959,3.02,0.31,10.3 +7.3,0.2,0.39,2.3,0.048,24.0,87.0,0.99044,2.94,0.35,12.0 +5.6,0.41,0.22,7.1,0.05,44.0,154.0,0.9931,3.3,0.4,10.5 +7.6,0.15,0.35,4.3,0.051,23.0,98.0,0.99422,3.1,0.44,9.5 +8.5,0.2,0.4,1.1,0.046,31.0,106.0,0.99194,3.0,0.35,10.5 
+6.5,0.24,0.38,1.0,0.027,31.0,90.0,0.98926,3.24,0.36,12.3 +8.3,0.16,0.37,7.9,0.025,38.0,107.0,0.99306,2.93,0.37,11.9 +5.5,0.12,0.33,1.0,0.038,23.0,131.0,0.99164,3.25,0.45,9.8 +6.5,0.24,0.38,1.0,0.027,31.0,90.0,0.98926,3.24,0.36,12.3 +6.2,0.1,0.41,1.0,0.04,17.0,76.0,0.98988,3.14,0.56,11.4 +6.5,0.21,0.4,7.3,0.041,49.0,115.0,0.99268,3.21,0.43,11.0 +8.7,0.3,0.59,1.7,0.046,10.0,70.0,0.99373,3.06,0.56,10.8 +6.7,0.18,0.37,1.3,0.027,42.0,125.0,0.98939,3.24,0.37,12.8 +7.0,0.17,0.36,6.4,0.055,42.0,123.0,0.99318,3.11,0.5,11.0 +6.6,0.19,0.33,1.8,0.035,42.0,148.0,0.99196,3.15,0.36,10.2 +5.8,0.28,0.3,1.5,0.026,31.0,114.0,0.98952,3.32,0.6,12.5 +7.6,0.24,0.44,3.8,0.037,49.0,146.0,0.9911,3.06,0.37,11.6 +8.3,0.16,0.37,7.9,0.025,38.0,107.0,0.99306,2.93,0.37,11.9 +5.5,0.12,0.33,1.0,0.038,23.0,131.0,0.99164,3.25,0.45,9.8 +5.7,0.16,0.32,1.2,0.036,7.0,89.0,0.99111,3.26,0.48,11.0 +7.0,0.21,0.42,5.3,0.037,36.0,123.0,0.99321,3.14,0.52,10.9 +6.4,0.22,0.38,9.1,0.044,35.0,127.0,0.99326,2.97,0.3,11.0 +7.9,0.34,0.44,6.5,0.027,47.0,126.0,0.99124,2.96,0.37,12.5 +6.4,0.22,0.38,9.1,0.044,35.0,127.0,0.99326,2.97,0.3,11.0 +6.8,0.21,0.4,6.3,0.032,40.0,121.0,0.99214,3.18,0.53,12.0 +5.2,0.31,0.36,5.1,0.031,46.0,145.0,0.9897,3.14,0.31,12.4 +7.9,0.34,0.44,6.5,0.027,47.0,126.0,0.99124,2.96,0.37,12.5 +5.6,0.42,0.34,2.4,0.022,34.0,97.0,0.98915,3.22,0.38,12.8 +6.4,0.22,0.38,9.1,0.044,35.0,127.0,0.99326,2.97,0.3,11.0 +6.8,0.28,0.34,7.5,0.035,34.0,177.0,0.99692,3.33,0.43,9.1 +6.8,0.45,0.36,5.0,0.033,28.0,156.0,0.991,3.11,0.4,12.4 +6.6,0.29,0.39,6.75,0.031,22.0,98.0,0.9913,3.15,0.8,12.9 +6.8,0.21,0.42,1.2,0.045,24.0,126.0,0.99234,3.09,0.87,10.9 +6.8,0.25,0.24,1.6,0.045,39.0,164.0,0.99402,3.53,0.58,10.8 +6.4,0.21,0.34,16.05,0.04,56.0,142.0,0.99678,3.11,0.38,10.6 +5.8,0.33,0.23,5.0,0.053,29.0,106.0,0.99458,3.13,0.52,9.0 +8.2,0.3,0.44,12.4,0.043,52.0,154.0,0.99452,3.04,0.33,12.0 +6.4,0.24,0.32,0.95,0.041,23.0,131.0,0.99033,3.25,0.35,11.8 +7.5,0.18,0.45,4.6,0.041,67.0,158.0,0.9927,3.01,0.38,10.6 +5.2,0.335,0.2,1.7,0.033,17.0,74.0,0.99002,3.34,0.48,12.3 +7.1,0.14,0.33,1.0,0.104,20.0,54.0,0.99057,3.19,0.64,11.5 +7.2,0.13,0.46,1.3,0.044,48.0,111.0,0.99127,2.97,0.45,11.1 +5.8,0.33,0.23,5.0,0.053,29.0,106.0,0.99458,3.13,0.52,9.0 +6.5,0.29,0.25,2.5,0.142,8.0,111.0,0.9927,3.0,0.44,9.9 +6.2,0.35,0.31,2.6,0.036,37.0,92.0,0.98938,3.27,0.53,12.8 +9.0,0.38,0.53,2.1,0.102,19.0,76.0,0.99001,2.93,0.57,12.9 +6.6,0.24,0.38,12.75,0.034,8.0,74.0,0.99386,3.1,0.57,12.9 +6.6,0.16,0.34,1.1,0.037,41.0,115.0,0.9899,3.01,0.68,12.0 +8.2,0.3,0.44,12.4,0.043,52.0,154.0,0.99452,3.04,0.33,12.0 +5.7,0.15,0.28,3.7,0.045,57.0,151.0,0.9913,3.22,0.27,11.2 +6.6,0.33,0.4,2.65,0.041,35.0,86.0,0.98916,3.11,0.39,13.3 +5.7,0.2,0.3,2.5,0.046,38.0,125.0,0.99276,3.34,0.5,9.9 +6.8,0.27,0.37,8.2,0.055,52.0,192.0,0.99586,3.11,0.52,9.5 +6.8,0.27,0.42,7.3,0.054,58.0,200.0,0.99556,3.12,0.49,9.4 +6.2,0.2,0.26,1.1,0.047,42.0,119.0,0.99158,3.48,0.6,11.0 +6.7,0.13,0.57,6.6,0.056,60.0,150.0,0.99548,2.96,0.43,9.4 +6.8,0.21,0.37,7.0,0.038,27.0,107.0,0.99206,2.98,0.82,11.5 +6.7,0.31,0.32,14.5,0.038,6.0,79.0,0.99412,3.14,0.34,12.5 +6.2,0.2,0.29,11.8,0.035,21.0,93.0,0.99364,3.18,0.34,11.9 +6.6,0.25,0.34,3.0,0.054,22.0,141.0,0.99338,3.26,0.47,10.4 +5.7,0.15,0.28,3.7,0.045,57.0,151.0,0.9913,3.22,0.27,11.2 +6.9,0.22,0.39,6.0,0.035,44.0,141.0,0.99123,3.11,0.33,12.5 +6.4,0.23,0.35,4.6,0.039,43.0,147.0,0.99216,3.18,0.4,11.0 +7.6,0.27,0.29,2.5,0.059,37.0,115.0,0.99328,3.09,0.37,9.8 +6.6,0.34,0.24,3.3,0.034,29.0,99.0,0.99031,3.1,0.4,12.3 +6.4,0.16,0.42,1.0,0.036,29.0,113.0,0.9908,3.18,0.52,11.0 
+5.8,0.3,0.42,1.1,0.036,19.0,113.0,0.98871,3.1,0.46,12.6 +7.0,0.29,0.35,1.4,0.036,42.0,109.0,0.99119,3.31,0.62,11.6 +6.6,0.34,0.24,3.3,0.034,29.0,99.0,0.99031,3.1,0.4,12.3 +6.7,0.21,0.36,8.55,0.02,20.0,86.0,0.99146,3.19,0.22,13.4 +7.6,0.27,0.29,2.5,0.059,37.0,115.0,0.99328,3.09,0.37,9.8 +6.8,0.22,0.41,6.7,0.034,39.0,116.0,0.99245,3.18,0.46,11.5 +7.7,0.27,0.49,3.8,0.037,46.0,139.0,0.99116,3.04,0.38,11.6 +6.4,0.25,0.37,4.5,0.039,41.0,147.0,0.9921,3.18,0.4,11.1 +6.4,0.23,0.35,4.6,0.039,43.0,147.0,0.99216,3.18,0.4,11.0 +6.7,0.13,0.45,4.2,0.043,52.0,131.0,0.99162,3.06,0.54,11.3 +6.7,0.24,0.37,11.3,0.043,64.0,173.0,0.99632,3.08,0.53,9.9 +7.1,0.26,0.37,5.5,0.025,31.0,105.0,0.99082,3.06,0.33,12.6 +5.3,0.3,0.16,4.2,0.029,37.0,100.0,0.9905,3.3,0.36,11.8 +7.1,0.38,0.4,2.2,0.042,54.0,201.0,0.99177,3.03,0.5,11.4 +7.4,0.19,0.31,14.5,0.045,39.0,193.0,0.9986,3.1,0.5,9.2 +7.4,0.19,0.31,14.5,0.045,39.0,193.0,0.9986,3.1,0.5,9.2 +7.4,0.19,0.31,14.5,0.045,39.0,193.0,0.9986,3.1,0.5,9.2 +7.4,0.19,0.31,14.5,0.045,39.0,193.0,0.9986,3.1,0.5,9.2 +7.4,0.19,0.31,14.5,0.045,39.0,193.0,0.9986,3.1,0.5,9.2 +7.4,0.19,0.31,14.5,0.045,39.0,193.0,0.9986,3.1,0.5,9.2 +6.3,0.32,0.32,1.5,0.03,24.0,101.0,0.98923,3.21,0.42,13.0 +7.6,0.19,0.32,18.75,0.047,32.0,193.0,1.00014,3.1,0.5,9.3 +6.5,0.26,0.31,3.6,0.03,36.0,92.0,0.99026,3.22,0.62,12.6 +5.9,0.24,0.12,1.4,0.035,60.0,247.0,0.99358,3.34,0.44,9.6 +4.2,0.215,0.23,5.1,0.041,64.0,157.0,0.99688,3.42,0.44,8.0 +8.1,0.24,0.32,10.5,0.03,34.0,105.0,0.99407,3.11,0.42,11.8 +5.8,0.23,0.2,2.0,0.043,39.0,154.0,0.99226,3.21,0.39,10.2 +7.5,0.33,0.36,2.6,0.051,26.0,126.0,0.99097,3.32,0.53,12.7 +6.6,0.38,0.36,9.2,0.061,42.0,214.0,0.9976,3.31,0.56,9.4 +6.4,0.15,0.29,1.8,0.044,21.0,115.0,0.99166,3.1,0.38,10.2 +6.5,0.32,0.34,5.7,0.044,27.0,91.0,0.99184,3.28,0.6,12.0 +7.5,0.22,0.32,2.4,0.045,29.0,100.0,0.99135,3.08,0.6,11.3 +6.4,0.23,0.32,1.9,0.038,40.0,118.0,0.99074,3.32,0.53,11.8 +6.1,0.22,0.31,1.4,0.039,40.0,129.0,0.99193,3.45,0.59,10.9 +6.5,0.48,0.02,0.9,0.043,32.0,99.0,0.99226,3.14,0.47,9.8 +6.6,0.23,0.3,4.6,0.06,29.0,154.0,0.99142,3.23,0.49,12.2 +6.4,0.16,0.25,1.4,0.057,21.0,125.0,0.99091,3.23,0.44,11.1 +6.6,0.38,0.36,9.2,0.061,42.0,214.0,0.9976,3.31,0.56,9.4 +7.4,0.16,0.32,1.4,0.065,23.0,140.0,0.99134,3.06,0.47,11.4 +6.4,0.15,0.29,1.8,0.044,21.0,115.0,0.99166,3.1,0.38,10.2 +6.5,0.32,0.3,2.3,0.051,20.0,127.0,0.98964,3.13,0.52,12.8 +6.7,0.12,0.36,2.3,0.039,43.0,125.0,0.99229,3.07,0.67,10.1 +6.6,0.2,0.14,4.4,0.184,35.0,168.0,0.99396,2.93,0.45,9.4 +8.0,0.34,0.25,6.4,0.035,38.0,103.0,0.99148,2.91,0.23,12.2 +6.8,0.21,0.31,2.9,0.046,40.0,121.0,0.9913,3.07,0.65,10.9 +6.8,0.23,0.31,2.8,0.047,40.0,122.0,0.99126,3.06,0.64,10.9 +6.8,0.21,0.31,2.9,0.046,40.0,121.0,0.9913,3.07,0.65,10.9 +6.6,0.2,0.14,4.4,0.184,35.0,168.0,0.99396,2.93,0.45,9.4 +6.6,0.28,0.42,8.2,0.044,60.0,196.0,0.99562,3.14,0.48,9.4 +7.8,0.25,0.37,1.0,0.043,10.0,80.0,0.99128,3.08,0.38,11.4 +5.6,0.12,0.33,2.9,0.044,21.0,73.0,0.98896,3.17,0.32,12.9 +6.6,0.28,0.41,7.0,0.046,59.0,194.0,0.99558,3.14,0.48,9.4 +6.8,0.17,0.35,1.8,0.04,29.0,84.0,0.98961,2.91,0.57,12.0 +7.3,0.25,0.28,1.5,0.043,19.0,113.0,0.99338,3.38,0.56,10.1 +6.6,0.28,0.41,7.0,0.046,59.0,194.0,0.99558,3.14,0.48,9.4 +6.6,0.28,0.42,8.2,0.044,60.0,196.0,0.99562,3.14,0.48,9.4 +6.5,0.25,0.5,7.6,0.047,54.0,184.0,0.99572,3.17,0.45,9.2 +6.3,0.24,0.35,2.3,0.039,43.0,109.0,0.99056,3.34,0.44,11.8 +6.8,0.32,0.32,8.7,0.029,31.0,105.0,0.99146,3.0,0.34,12.3 +7.8,0.25,0.37,1.0,0.043,10.0,80.0,0.99128,3.08,0.38,11.4 +5.6,0.12,0.33,2.9,0.044,21.0,73.0,0.98896,3.17,0.32,12.9 
+6.6,0.24,0.28,6.7,0.032,26.0,91.0,0.99172,3.13,0.32,12.3 +6.3,0.22,0.34,5.0,0.032,36.0,93.0,0.99012,3.27,0.36,13.5 +6.0,0.32,0.3,1.9,0.033,41.0,142.0,0.98912,3.29,0.42,12.8 +6.3,0.19,0.29,2.0,0.022,33.0,96.0,0.98902,3.04,0.54,12.8 +6.0,0.32,0.3,1.9,0.033,41.0,142.0,0.98912,3.29,0.42,12.8 +9.4,0.24,0.29,8.5,0.037,124.0,208.0,0.99395,2.9,0.38,11.0 +6.4,0.35,0.28,12.6,0.039,19.0,124.0,0.99539,3.2,0.43,10.6 +6.7,0.46,0.27,5.2,0.039,35.0,96.0,0.99129,3.16,0.44,12.4 +6.3,0.3,0.29,2.1,0.048,33.0,142.0,0.98956,3.22,0.46,12.9 +6.0,0.19,0.29,1.1,0.047,67.0,152.0,0.9916,3.54,0.59,11.1 +5.9,0.24,0.28,1.3,0.032,36.0,95.0,0.98889,3.08,0.64,12.9 +7.3,0.145,0.33,1.1,0.042,14.0,64.0,0.99012,3.1,0.37,11.8 +6.6,0.435,0.38,9.2,0.058,66.0,243.0,0.99833,3.23,0.54,9.1 +5.8,0.18,0.37,1.2,0.036,19.0,74.0,0.98853,3.09,0.49,12.7 +5.8,0.18,0.37,1.1,0.036,31.0,96.0,0.98942,3.16,0.48,12.0 +5.6,0.32,0.32,8.3,0.043,32.0,105.0,0.99266,3.24,0.47,11.2 +6.6,0.16,0.35,1.8,0.042,26.0,105.0,0.98962,3.19,0.75,12.4 +5.1,0.21,0.28,1.4,0.047,48.0,148.0,0.99168,3.5,0.49,10.4 +7.5,0.29,0.36,15.7,0.05,29.0,124.0,0.9968,3.06,0.54,10.4 +6.0,0.26,0.33,4.35,0.04,15.0,80.0,0.98934,3.29,0.5,12.7 +5.7,0.26,0.3,1.8,0.039,30.0,105.0,0.98995,3.48,0.52,12.5 +7.1,0.17,0.31,1.6,0.037,15.0,103.0,0.991,3.14,0.5,12.0 +6.9,0.17,0.3,2.0,0.047,13.0,117.0,0.99152,3.16,0.51,11.6 +6.8,0.25,0.28,5.0,0.035,42.0,126.0,0.99048,3.12,0.38,12.6 +6.6,0.17,0.28,1.8,0.042,62.0,178.0,0.99204,3.15,0.42,10.2 +5.8,0.17,0.36,1.3,0.036,11.0,70.0,0.99202,3.43,0.68,10.4 +6.4,0.24,0.29,1.0,0.038,18.0,122.0,0.9906,3.3,0.42,11.5 +6.7,0.21,0.34,1.4,0.049,36.0,112.0,0.99091,3.02,0.5,11.0 +6.7,0.23,0.33,8.1,0.048,45.0,176.0,0.99472,3.11,0.52,10.1 +6.8,0.23,0.32,8.6,0.046,47.0,159.0,0.99452,3.08,0.52,10.5 +6.5,0.22,0.28,3.7,0.059,29.0,151.0,0.99177,3.23,0.41,12.1 +5.1,0.165,0.22,5.7,0.047,42.0,146.0,0.9934,3.18,0.55,9.9 +6.6,0.425,0.25,2.35,0.034,23.0,87.0,0.99082,3.05,0.41,11.4 +6.9,0.38,0.29,13.65,0.048,52.0,189.0,0.99784,3.0,0.6,9.5 +6.9,0.38,0.29,13.65,0.048,52.0,189.0,0.99784,3.0,0.6,9.5 +6.9,0.38,0.29,13.65,0.048,52.0,189.0,0.99784,3.0,0.6,9.5 +7.2,0.27,0.28,15.2,0.046,6.0,41.0,0.99665,3.17,0.39,10.9 +7.6,0.17,0.27,4.6,0.05,23.0,98.0,0.99422,3.08,0.47,9.5 +6.2,0.3,0.31,1.2,0.048,19.0,125.0,0.98999,3.32,0.54,12.6 +7.6,0.17,0.27,4.6,0.05,23.0,98.0,0.99422,3.08,0.47,9.5 +6.5,0.26,0.32,6.65,0.059,34.0,104.0,0.99254,3.18,0.42,11.1 +6.9,0.36,0.28,13.55,0.048,51.0,189.0,0.99782,3.0,0.6,9.5 +6.9,0.38,0.29,13.65,0.048,52.0,189.0,0.99784,3.0,0.6,9.5 +6.8,0.18,0.24,9.8,0.058,64.0,188.0,0.9952,3.13,0.51,10.6 +6.7,0.18,0.24,10.3,0.057,64.0,185.0,0.99519,3.12,0.5,10.6 +6.6,0.16,0.21,6.7,0.055,43.0,157.0,0.99384,3.15,0.52,10.8 +7.2,0.27,0.28,15.2,0.046,6.0,41.0,0.99665,3.17,0.39,10.9 +6.4,0.17,0.27,9.9,0.047,26.0,101.0,0.99596,3.34,0.5,9.9 +7.2,0.22,0.28,7.2,0.06,41.0,132.0,0.9935,3.08,0.59,11.3 +6.0,0.22,0.28,1.1,0.034,47.0,90.0,0.98862,3.22,0.38,12.6 +6.7,0.36,0.28,8.3,0.034,29.0,81.0,0.99151,2.96,0.39,12.5 +6.5,0.43,0.28,11.25,0.032,31.0,87.0,0.9922,3.02,0.38,12.4 +5.9,0.2,0.28,12.8,0.038,29.0,132.0,0.99426,3.31,0.57,11.8 +5.3,0.32,0.23,9.65,0.026,26.0,119.0,0.99168,3.18,0.53,12.2 +6.8,0.2,0.28,12.6,0.048,54.0,136.0,0.99556,3.19,0.37,10.7 +6.0,0.22,0.33,12.2,0.033,25.0,97.0,0.99356,3.17,0.42,11.3 +6.7,0.36,0.28,8.3,0.034,29.0,81.0,0.99151,2.96,0.39,12.5 +6.5,0.43,0.28,11.25,0.032,31.0,87.0,0.9922,3.02,0.38,12.4 +7.1,0.18,0.49,1.3,0.033,12.0,72.0,0.99072,3.05,0.53,11.3 +6.4,0.17,0.27,9.9,0.047,26.0,101.0,0.99596,3.34,0.5,9.9 +7.2,0.22,0.28,7.2,0.06,41.0,132.0,0.9935,3.08,0.59,11.3 
+6.0,0.22,0.28,1.1,0.034,47.0,90.0,0.98862,3.22,0.38,12.6 +6.0,0.2,0.26,1.1,0.033,38.0,67.0,0.98954,3.14,0.38,11.5 +7.6,0.2,0.26,4.8,0.033,26.0,76.0,0.99076,2.98,0.49,12.3 +6.2,0.3,0.21,1.1,0.032,31.0,111.0,0.9889,2.97,0.42,12.2 +6.0,0.29,0.25,1.4,0.033,30.0,114.0,0.98794,3.08,0.43,13.2 +6.6,0.18,0.28,1.7,0.041,53.0,161.0,0.99207,3.13,0.45,10.2 +7.0,0.22,0.28,10.6,0.039,32.0,117.0,0.99355,3.05,0.55,11.5 +6.0,0.29,0.25,1.4,0.033,30.0,114.0,0.98794,3.08,0.43,13.2 +6.2,0.3,0.21,1.1,0.032,31.0,111.0,0.9889,2.97,0.42,12.2 +5.6,0.15,0.26,5.55,0.051,51.0,139.0,0.99336,3.47,0.5,11.0 +6.9,0.28,0.24,2.1,0.034,49.0,121.0,0.98882,2.98,0.43,13.2 +5.9,0.19,0.21,1.7,0.045,57.0,135.0,0.99341,3.32,0.44,9.5 +7.8,0.22,0.26,9.0,0.047,38.0,132.0,0.997,3.25,0.53,10.2 +6.6,0.18,0.28,1.7,0.041,53.0,161.0,0.99207,3.13,0.45,10.2 +7.0,0.4,0.25,1.8,0.05,51.0,189.0,0.99174,3.0,0.55,11.4 +6.1,0.28,0.27,4.7,0.03,56.0,140.0,0.99042,3.16,0.42,12.5 +7.6,0.36,0.49,11.3,0.046,87.0,221.0,0.9984,3.01,0.43,9.2 +6.5,0.28,0.34,3.6,0.04,29.0,121.0,0.99111,3.28,0.48,12.1 +6.9,0.19,0.35,6.9,0.045,51.0,125.0,0.9933,3.1,0.44,10.7 +6.5,0.28,0.34,3.6,0.04,29.0,121.0,0.99111,3.28,0.48,12.1 +6.4,0.22,0.32,4.9,0.046,50.0,156.0,0.99316,3.38,0.55,11.2 +6.8,0.23,0.3,6.95,0.044,42.0,179.0,0.9946,3.25,0.56,10.6 +6.4,0.32,0.31,1.9,0.037,34.0,126.0,0.99,3.06,0.45,11.8 +6.1,0.28,0.27,4.7,0.03,56.0,140.0,0.99042,3.16,0.42,12.5 +7.6,0.36,0.49,11.3,0.046,87.0,221.0,0.9984,3.01,0.43,9.2 +8.8,0.39,0.35,1.8,0.096,22.0,80.0,0.99016,2.95,0.54,12.6 +6.6,0.24,0.3,11.3,0.026,11.0,77.0,0.99381,3.13,0.55,12.8 +6.9,0.29,0.3,8.2,0.026,35.0,112.0,0.99144,3.0,0.37,12.3 +6.9,0.28,0.3,8.3,0.026,37.0,113.0,0.99139,2.99,0.38,12.3 +6.7,0.38,0.26,9.55,0.036,35.0,91.0,0.9919,2.98,0.37,12.4 +8.0,0.28,0.3,8.4,0.03,35.0,115.0,0.99192,2.93,0.42,12.3 +6.5,0.25,0.45,7.8,0.048,52.0,188.0,0.99576,3.2,0.53,9.1 +6.6,0.26,0.46,7.8,0.047,48.0,186.0,0.9958,3.2,0.54,9.1 +7.4,0.29,0.28,10.2,0.032,43.0,138.0,0.9951,3.1,0.47,10.6 +6.3,0.19,0.29,5.5,0.042,44.0,189.0,0.99304,3.19,0.47,10.3 +6.1,0.33,0.32,7.8,0.052,52.0,183.0,0.99657,3.39,0.65,9.5 +5.6,0.32,0.33,7.4,0.037,25.0,95.0,0.99268,3.25,0.49,11.1 +7.7,0.46,0.18,3.3,0.054,18.0,143.0,0.99392,3.12,0.51,10.8 +8.8,0.19,0.3,5.0,0.028,34.0,120.0,0.99242,2.94,0.47,11.2 +7.7,0.46,0.18,3.3,0.054,18.0,143.0,0.99392,3.12,0.51,10.8 +8.8,0.27,0.25,5.0,0.024,52.0,99.0,0.9925,2.87,0.49,11.4 +5.8,0.18,0.28,1.3,0.034,9.0,94.0,0.99092,3.21,0.52,11.2 +5.8,0.15,0.32,1.2,0.037,14.0,119.0,0.99137,3.19,0.5,10.2 +5.6,0.32,0.33,7.4,0.037,25.0,95.0,0.99268,3.25,0.49,11.1 +6.1,0.33,0.32,7.8,0.052,52.0,183.0,0.99657,3.39,0.65,9.5 +7.1,0.32,0.3,9.9,0.041,63.0,192.0,0.99642,3.12,0.49,10.2 +6.2,0.23,0.35,0.7,0.051,24.0,111.0,0.9916,3.37,0.43,11.0 +8.9,0.3,0.35,4.6,0.032,32.0,148.0,0.99458,3.15,0.45,11.5 +6.0,0.14,0.17,5.6,0.036,37.0,127.0,0.99373,3.05,0.57,9.8 +6.8,0.24,0.29,9.5,0.042,56.0,157.0,0.99586,3.11,0.51,10.1 +6.7,0.21,0.48,14.8,0.05,31.0,195.0,0.99942,2.95,0.75,8.8 +8.9,0.3,0.35,4.6,0.032,32.0,148.0,0.99458,3.15,0.45,11.5 +6.1,0.3,0.3,2.1,0.031,50.0,163.0,0.9895,3.39,0.43,12.7 +7.2,0.37,0.4,11.6,0.032,34.0,214.0,0.9963,3.1,0.51,9.8 +6.7,0.64,0.3,1.2,0.03,18.0,76.0,0.9892,3.16,0.6,12.9 +7.2,0.37,0.4,11.6,0.032,34.0,214.0,0.9963,3.1,0.51,9.8 +6.1,0.3,0.3,2.1,0.031,50.0,163.0,0.9895,3.39,0.43,12.7 +7.6,0.28,0.49,20.15,0.06,30.0,145.0,1.00196,3.01,0.44,8.5 +6.3,0.29,0.28,4.7,0.059,28.0,81.0,0.99036,3.24,0.56,12.7 +6.2,0.28,0.28,4.3,0.026,22.0,105.0,0.989,2.98,0.64,13.1 +7.1,0.18,0.39,14.5,0.051,48.0,156.0,0.99947,3.35,0.78,9.1 
+6.4,0.32,0.27,4.9,0.034,18.0,122.0,0.9916,3.36,0.71,12.5 +7.1,0.17,0.4,14.55,0.047,47.0,156.0,0.99945,3.34,0.78,9.1 +7.1,0.17,0.4,14.55,0.047,47.0,156.0,0.99945,3.34,0.78,9.1 +5.8,0.24,0.26,10.05,0.039,63.0,162.0,0.99375,3.33,0.5,11.2 +6.4,0.32,0.27,4.9,0.034,18.0,122.0,0.9916,3.36,0.71,12.5 +7.1,0.18,0.39,14.5,0.051,48.0,156.0,0.99947,3.35,0.78,9.1 +7.1,0.17,0.4,14.55,0.047,47.0,156.0,0.99945,3.34,0.78,9.1 +7.1,0.18,0.39,15.25,0.047,45.0,158.0,0.99946,3.34,0.77,9.1 +7.8,0.29,0.29,3.15,0.044,41.0,117.0,0.99153,3.24,0.35,11.5 +6.2,0.255,0.27,1.3,0.037,30.0,86.0,0.98834,3.05,0.59,12.9 +8.2,0.34,0.29,5.2,0.076,19.0,92.0,0.99138,2.95,0.39,12.5 +6.5,0.24,0.28,1.1,0.034,26.0,83.0,0.98928,3.25,0.33,12.3 +6.9,0.24,0.23,7.1,0.041,20.0,97.0,0.99246,3.1,0.85,11.4 +6.7,0.4,0.22,8.8,0.052,24.0,113.0,0.99576,3.22,0.45,9.4 +6.7,0.3,0.44,18.5,0.057,65.0,224.0,0.99956,3.11,0.53,9.1 +6.7,0.4,0.22,8.8,0.052,24.0,113.0,0.99576,3.22,0.45,9.4 +6.8,0.17,0.32,1.4,0.04,35.0,106.0,0.99026,3.16,0.66,12.0 +7.1,0.25,0.28,1.2,0.04,31.0,111.0,0.99174,3.18,0.53,11.1 +5.9,0.27,0.27,5.0,0.035,14.0,97.0,0.99058,3.1,0.33,11.8 +6.0,0.16,0.22,1.6,0.042,36.0,106.0,0.9905,3.24,0.32,11.4 +6.7,0.3,0.44,18.75,0.057,65.0,224.0,0.99956,3.11,0.53,9.1 +6.6,0.15,0.32,6.0,0.033,59.0,128.0,0.99192,3.19,0.71,12.1 +7.3,0.34,0.3,9.4,0.057,34.0,178.0,0.99554,3.15,0.44,10.4 +6.0,0.17,0.29,9.7,0.044,33.0,98.0,0.99536,3.12,0.36,9.2 +6.7,0.47,0.29,4.75,0.034,29.0,134.0,0.99056,3.29,0.46,13.0 +6.6,0.15,0.32,6.0,0.033,59.0,128.0,0.99192,3.19,0.71,12.1 +6.6,0.21,0.29,5.35,0.029,43.0,106.0,0.99112,2.93,0.43,11.5 +6.6,0.21,0.29,5.35,0.029,43.0,106.0,0.99112,2.93,0.43,11.5 +8.0,0.24,0.48,6.8,0.047,13.0,134.0,0.99616,3.23,0.7,10.0 +5.6,0.34,0.3,6.9,0.038,23.0,89.0,0.99266,3.25,0.49,11.1 +5.8,0.54,0.0,1.4,0.033,40.0,107.0,0.98918,3.26,0.35,12.4 +7.3,0.23,0.24,0.9,0.031,29.0,86.0,0.98926,2.9,0.38,12.2 +6.0,0.39,0.13,1.2,0.042,60.0,172.0,0.99114,3.06,0.52,10.6 +6.1,0.105,0.31,1.3,0.037,55.0,145.0,0.9912,3.41,0.41,11.1 +5.8,0.32,0.2,2.6,0.027,17.0,123.0,0.98936,3.36,0.78,13.9 +7.6,0.22,0.28,12.0,0.056,68.0,143.0,0.9983,2.99,0.3,9.2 +6.8,0.19,0.4,9.85,0.055,41.0,103.0,0.99532,2.98,0.56,10.5 +6.7,0.24,0.3,3.85,0.042,105.0,179.0,0.99189,3.04,0.59,11.3 +6.8,0.17,0.34,2.0,0.04,38.0,111.0,0.99,3.24,0.45,12.9 +6.2,0.3,0.31,1.6,0.035,40.0,106.0,0.98914,3.26,0.39,12.9 +6.9,0.29,0.41,7.8,0.046,52.0,171.0,0.99537,3.12,0.51,9.6 +6.8,0.19,0.34,1.9,0.04,41.0,108.0,0.99,3.25,0.45,12.9 +6.8,0.17,0.34,2.0,0.04,38.0,111.0,0.99,3.24,0.45,12.9 +6.6,0.24,0.27,10.3,0.047,54.0,219.0,0.99742,3.04,0.45,8.8 +6.6,0.16,0.36,1.1,0.031,27.0,93.0,0.98884,3.23,0.34,13.2 +7.6,0.22,0.28,12.0,0.056,68.0,143.0,0.9983,2.99,0.3,9.2 +6.7,0.24,0.3,3.85,0.042,105.0,179.0,0.99189,3.04,0.59,11.3 +6.8,0.19,0.4,9.85,0.055,41.0,103.0,0.99532,2.98,0.56,10.5 +6.7,0.16,0.36,2.0,0.045,24.0,131.0,0.99284,3.3,0.59,10.5 +6.5,0.3,0.27,4.0,0.038,37.0,97.0,0.99026,3.2,0.6,12.6 +6.5,0.22,0.19,1.1,0.064,36.0,191.0,0.99297,3.05,0.5,9.5 +6.2,0.36,0.45,10.4,0.06,22.0,184.0,0.99711,3.31,0.56,9.8 +6.2,0.37,0.24,6.1,0.032,19.0,86.0,0.98934,3.04,0.26,13.4 +7.6,0.31,0.24,1.8,0.037,39.0,150.0,0.9913,3.05,0.44,11.8 +6.2,0.36,0.45,10.4,0.06,22.0,184.0,0.99711,3.31,0.56,9.8 +5.9,0.32,0.28,4.7,0.039,34.0,94.0,0.98964,3.22,0.57,13.1 +6.5,0.3,0.27,4.0,0.038,37.0,97.0,0.99026,3.2,0.6,12.6 +5.8,0.22,0.3,1.1,0.047,36.0,131.0,0.992,3.26,0.45,10.4 +5.4,0.45,0.27,6.4,0.033,20.0,102.0,0.98944,3.22,0.27,13.4 +6.1,0.36,0.26,8.15,0.035,14.0,88.0,0.99031,3.06,0.27,13.0 +6.2,0.37,0.24,6.1,0.032,19.0,86.0,0.98934,3.04,0.26,13.4 
+7.5,0.21,0.32,4.8,0.056,39.0,113.0,0.99393,3.11,0.52,10.2 +6.9,0.28,0.33,1.2,0.039,16.0,98.0,0.9904,3.07,0.39,11.7 +6.5,0.22,0.19,1.1,0.064,36.0,191.0,0.99297,3.05,0.5,9.5 +7.8,0.2,0.2,1.4,0.036,25.0,83.0,0.99088,3.03,0.46,11.7 +6.7,0.28,0.31,7.4,0.041,7.0,81.0,0.99254,3.04,0.47,11.4 +7.6,0.31,0.24,1.8,0.037,39.0,150.0,0.9913,3.05,0.44,11.8 +8.0,0.2,0.44,1.0,0.057,24.0,111.0,0.99158,3.09,0.32,11.2 +6.0,0.28,0.27,15.5,0.036,31.0,134.0,0.99408,3.19,0.44,13.0 +6.0,0.28,0.27,15.5,0.036,31.0,134.0,0.99408,3.19,0.44,13.0 +6.7,0.24,0.36,8.4,0.042,42.0,123.0,0.99473,3.34,0.52,10.9 +6.3,0.22,0.28,2.4,0.042,38.0,102.0,0.98998,3.14,0.37,11.6 +6.0,0.24,0.28,3.95,0.038,61.0,134.0,0.99146,3.3,0.54,11.3 +7.7,0.43,1.0,19.95,0.032,42.0,164.0,0.99742,3.29,0.5,12.0 +6.4,0.3,0.36,2.0,0.052,18.0,141.0,0.99273,3.38,0.53,10.5 +6.1,0.33,0.3,3.0,0.036,30.0,124.0,0.98922,3.31,0.4,13.1 +6.0,0.28,0.27,15.5,0.036,31.0,134.0,0.99408,3.19,0.44,13.0 +6.7,0.24,0.36,8.4,0.042,42.0,123.0,0.99473,3.34,0.52,10.9 +6.7,0.29,0.45,14.3,0.054,30.0,181.0,0.99869,3.14,0.57,9.1 +6.9,0.33,0.31,4.2,0.04,21.0,93.0,0.9896,3.18,0.48,13.4 +6.5,0.16,0.34,1.4,0.029,29.0,133.0,0.99108,3.33,0.64,11.5 +6.0,0.2,0.32,3.0,0.031,26.0,118.0,0.99134,3.38,0.68,11.2 +7.5,0.33,0.28,4.9,0.042,21.0,155.0,0.99385,3.36,0.57,10.9 +7.1,0.36,0.28,2.4,0.036,35.0,115.0,0.98936,3.19,0.44,13.5 +6.7,0.29,0.45,14.3,0.054,30.0,181.0,0.99869,3.14,0.57,9.1 +6.4,0.26,0.25,10.7,0.046,66.0,179.0,0.99606,3.17,0.55,9.9 +7.0,0.22,0.24,11.0,0.041,75.0,167.0,0.99508,2.98,0.56,10.5 +6.5,0.19,0.28,1.4,0.046,22.0,90.0,0.99038,3.18,0.51,11.7 +6.3,0.21,0.31,1.2,0.043,30.0,117.0,0.99158,3.49,0.68,11.0 +7.9,0.35,0.28,12.9,0.032,13.0,63.0,0.9932,2.99,0.43,13.0 +7.7,0.38,0.23,10.8,0.03,28.0,95.0,0.99164,2.93,0.41,13.6 +6.8,0.19,0.33,1.3,0.031,22.0,87.0,0.98987,3.08,0.62,12.3 +7.2,0.33,0.34,2.0,0.044,61.0,171.0,0.98947,3.25,0.53,13.3 +6.6,0.29,0.29,1.8,0.036,38.0,102.0,0.98819,3.08,0.42,13.7 +7.5,0.2,0.41,1.2,0.05,26.0,131.0,0.99133,3.19,0.52,11.1 +6.9,0.33,0.62,7.5,0.038,46.0,132.0,0.99143,3.23,0.43,13.4 +6.0,0.23,0.15,9.7,0.048,101.0,207.0,0.99571,3.05,0.3,9.1 +5.9,0.23,0.24,3.8,0.038,61.0,152.0,0.99139,3.31,0.5,11.3 +6.6,0.32,0.41,7.2,0.048,55.0,178.0,0.99537,3.2,0.46,9.4 +6.0,0.23,0.15,9.7,0.048,101.0,207.0,0.99571,3.05,0.3,9.1 +5.3,0.36,0.27,6.3,0.028,40.0,132.0,0.99186,3.37,0.4,11.6 +5.3,0.36,0.27,6.3,0.028,40.0,132.0,0.99186,3.37,0.4,11.6 +8.9,0.27,0.28,0.8,0.024,29.0,128.0,0.98984,3.01,0.35,12.4 +7.6,0.23,0.29,8.6,0.053,65.0,146.0,0.9963,3.11,0.32,9.8 +6.9,0.75,0.13,6.3,0.036,19.0,50.0,0.99312,3.09,0.25,11.1 +7.1,0.35,0.27,3.1,0.034,28.0,134.0,0.9897,3.26,0.38,13.1 +7.2,0.31,0.35,7.2,0.046,45.0,178.0,0.9955,3.14,0.53,9.7 +6.4,0.28,0.44,7.1,0.048,49.0,179.0,0.99528,3.15,0.48,9.2 +7.2,0.23,0.46,6.4,0.036,17.0,85.0,0.99279,3.1,0.78,11.7 +6.6,0.22,0.3,14.7,0.045,50.0,136.0,0.99704,3.14,0.37,10.6 +7.2,0.31,0.35,7.2,0.046,45.0,178.0,0.9955,3.14,0.53,9.7 +6.4,0.28,0.44,7.1,0.048,49.0,179.0,0.99528,3.15,0.48,9.2 +7.2,0.24,0.28,1.9,0.032,30.0,92.0,0.9914,3.1,0.39,10.9 +6.2,0.27,0.47,1.2,0.146,28.0,105.0,0.99224,3.23,0.51,10.1 +6.5,0.28,0.25,4.8,0.029,54.0,128.0,0.99074,3.17,0.44,12.2 +7.2,0.27,0.31,1.2,0.031,27.0,80.0,0.98892,3.03,0.33,12.7 +7.8,0.28,0.25,3.4,0.024,27.0,99.0,0.98959,2.98,0.37,13.0 +8.1,0.26,0.27,4.3,0.03,43.0,123.0,0.99212,3.16,0.33,11.2 +6.6,0.23,0.37,8.5,0.036,46.0,153.0,0.99576,3.2,0.48,9.4 +6.0,0.33,0.2,1.8,0.031,49.0,159.0,0.9919,3.41,0.53,11.0 +6.0,0.33,0.2,1.8,0.031,49.0,159.0,0.9919,3.41,0.53,11.0 +7.3,0.2,0.29,19.5,0.039,69.0,237.0,1.00037,3.1,0.48,9.2 
+6.6,0.23,0.37,8.5,0.036,46.0,153.0,0.99576,3.2,0.48,9.4 +7.3,0.2,0.29,19.9,0.039,69.0,237.0,1.00037,3.1,0.48,9.2 +6.2,0.47,0.19,8.3,0.029,24.0,142.0,0.992,3.22,0.45,12.3 +6.0,0.33,0.2,1.8,0.031,49.0,159.0,0.9919,3.41,0.53,11.0 +7.2,0.14,0.32,1.1,0.022,48.0,116.0,0.99218,3.04,0.67,10.0 +5.7,0.22,0.22,16.65,0.044,39.0,110.0,0.99855,3.24,0.48,9.0 +5.7,0.22,0.22,16.65,0.044,39.0,110.0,0.99855,3.24,0.48,9.0 +5.7,0.22,0.22,16.65,0.044,39.0,110.0,0.99855,3.24,0.48,9.0 +8.1,0.2,0.28,0.9,0.023,49.0,87.0,0.99062,2.92,0.36,11.1 +5.8,0.14,0.15,6.1,0.042,27.0,123.0,0.99362,3.06,0.6,9.9 +4.8,0.21,0.21,10.2,0.037,17.0,112.0,0.99324,3.66,0.48,12.2 +8.1,0.2,0.28,0.9,0.023,49.0,87.0,0.99062,2.92,0.36,11.1 +5.7,0.22,0.22,16.65,0.044,39.0,110.0,0.99855,3.24,0.48,9.0 +7.5,0.34,0.24,3.85,0.031,5.0,34.0,0.99098,3.01,0.36,11.8 +6.6,0.64,0.28,4.4,0.032,19.0,78.0,0.99036,3.11,0.62,12.9 +7.0,0.48,0.12,4.5,0.05,23.0,86.0,0.99398,2.86,0.35,9.0 +7.6,0.37,0.34,3.2,0.028,42.0,162.0,0.9903,3.01,0.33,12.4 +7.0,0.48,0.12,4.5,0.05,23.0,86.0,0.99398,2.86,0.35,9.0 +6.6,0.64,0.28,4.4,0.032,19.0,78.0,0.99036,3.11,0.62,12.9 +8.0,0.25,0.27,9.7,0.036,15.0,85.0,0.99406,2.99,0.36,11.2 +7.6,0.38,0.28,4.2,0.029,7.0,112.0,0.9906,3.0,0.41,12.6 +6.9,0.26,0.27,4.2,0.031,20.0,80.0,0.99089,3.12,0.39,11.5 +7.8,0.15,0.34,1.1,0.035,31.0,93.0,0.99096,3.07,0.72,11.3 +8.0,0.25,0.27,9.7,0.036,15.0,85.0,0.99406,2.99,0.36,11.2 +6.9,0.26,0.27,4.2,0.031,20.0,80.0,0.99089,3.12,0.39,11.5 +5.9,0.655,0.0,5.6,0.033,8.0,31.0,0.9936,3.32,0.51,10.5 +7.6,0.38,0.28,4.2,0.029,7.0,112.0,0.9906,3.0,0.41,12.6 +7.8,0.31,0.4,1.6,0.027,20.0,87.0,0.9911,3.15,0.48,11.9 +8.1,0.17,0.21,1.6,0.036,24.0,119.0,0.99396,3.18,0.52,10.1 +6.8,0.18,0.28,1.1,0.027,32.0,112.0,0.99089,3.15,0.45,11.0 +7.4,0.28,0.36,14.6,0.048,35.0,161.0,0.9968,3.14,0.56,10.6 +7.3,0.23,0.27,2.6,0.035,39.0,120.0,0.99138,3.04,0.59,11.3 +6.7,0.22,0.22,1.2,0.038,5.0,124.0,0.99098,3.1,0.37,11.2 +7.4,0.25,0.28,7.25,0.028,14.0,78.0,0.99238,2.94,0.37,11.5 +7.5,0.3,0.21,6.55,0.026,33.0,143.0,0.99244,2.92,0.35,11.1 +7.2,0.26,0.24,7.0,0.023,19.0,130.0,0.99176,3.14,0.49,12.8 +6.3,0.32,0.32,1.5,0.037,12.0,76.0,0.98993,3.3,0.46,12.3 +7.7,0.24,0.3,1.4,0.041,15.0,102.0,0.9929,3.26,0.53,10.4 +7.4,0.25,0.28,7.25,0.028,14.0,78.0,0.99238,2.94,0.37,11.5 +7.0,0.24,0.35,1.0,0.032,42.0,104.0,0.98988,3.16,0.37,11.7 +5.8,0.28,0.28,4.2,0.044,52.0,158.0,0.992,3.35,0.44,10.7 +6.8,0.19,0.71,17.5,0.042,21.0,114.0,0.99784,2.85,0.5,9.5 +6.8,0.19,0.71,17.5,0.042,21.0,114.0,0.99784,2.85,0.5,9.5 +6.8,0.19,0.71,17.5,0.042,21.0,114.0,0.99784,2.85,0.5,9.5 +6.6,0.19,0.35,1.5,0.037,37.0,107.0,0.99006,3.18,0.68,12.0 +6.4,0.28,0.36,1.3,0.053,28.0,186.0,0.99211,3.31,0.45,10.8 +5.6,0.28,0.27,3.9,0.043,52.0,158.0,0.99202,3.35,0.44,10.7 +5.6,0.28,0.28,4.2,0.044,52.0,158.0,0.992,3.35,0.44,10.7 +6.8,0.19,0.32,7.6,0.049,37.0,107.0,0.99332,3.12,0.44,10.7 +7.2,0.16,0.29,1.0,0.031,40.0,123.0,0.98958,3.12,0.4,12.1 +6.6,0.17,0.28,1.1,0.034,55.0,108.0,0.98939,3.0,0.52,11.9 +6.6,0.19,0.28,11.8,0.042,54.0,137.0,0.99492,3.18,0.37,10.8 +5.8,0.2,0.24,1.4,0.033,65.0,169.0,0.99043,3.59,0.56,12.3 +6.6,0.39,0.38,9.7,0.053,49.0,226.0,0.99787,3.3,0.57,9.4 +6.8,0.12,0.3,12.9,0.049,32.0,88.0,0.99654,3.2,0.35,9.9 +6.6,0.295,0.24,1.6,0.039,29.0,140.0,0.99304,3.35,0.61,10.4 +6.6,0.26,0.24,7.2,0.038,28.0,137.0,0.9952,3.35,0.6,10.4 +7.0,0.32,0.27,7.1,0.027,37.0,122.0,0.99165,3.15,0.6,12.6 +7.4,0.36,0.23,1.9,0.017,31.0,69.0,0.9892,2.93,0.36,12.5 +6.7,0.35,0.48,8.8,0.056,35.0,167.0,0.99628,3.04,0.47,9.4 +6.4,0.38,0.24,7.2,0.047,41.0,151.0,0.99604,3.11,0.6,9.2 
+6.8,0.14,0.18,1.4,0.047,30.0,90.0,0.99164,3.27,0.54,11.2 +7.0,0.16,0.25,14.3,0.044,27.0,149.0,0.998,2.91,0.46,9.2 +7.0,0.16,0.25,14.3,0.044,27.0,149.0,0.998,2.91,0.46,9.2 +6.7,0.35,0.48,8.8,0.056,35.0,167.0,0.99628,3.04,0.47,9.4 +6.8,0.14,0.18,1.4,0.047,30.0,90.0,0.99164,3.27,0.54,11.2 +6.8,0.16,0.18,1.8,0.046,31.0,114.0,0.99226,3.27,0.55,10.8 +7.0,0.16,0.25,14.3,0.044,27.0,149.0,0.998,2.91,0.46,9.2 +6.4,0.38,0.24,7.2,0.047,41.0,151.0,0.99604,3.11,0.6,9.2 +7.2,0.24,0.3,1.2,0.037,11.0,95.0,0.98914,2.96,0.36,12.5 +7.7,0.32,0.61,11.8,0.041,66.0,188.0,0.99794,3.0,0.54,9.3 +7.0,0.29,0.33,0.9,0.041,20.0,117.0,0.99048,3.21,0.5,11.4 +7.1,0.27,0.24,12.6,0.044,48.0,118.0,0.99726,3.04,0.56,10.0 +6.8,0.45,0.28,26.05,0.031,27.0,122.0,1.00295,3.06,0.42,10.6 +6.3,0.2,0.26,4.7,0.04,108.0,168.0,0.99278,3.07,0.75,10.7 +7.1,0.27,0.24,12.6,0.044,48.0,118.0,0.99726,3.04,0.56,10.0 +7.2,0.24,0.3,1.2,0.037,11.0,95.0,0.98914,2.96,0.36,12.5 +6.8,0.45,0.28,26.05,0.031,27.0,122.0,1.00295,3.06,0.42,10.6 +6.6,0.36,0.28,6.1,0.029,12.0,93.0,0.99054,3.19,0.27,12.8 +7.7,0.32,0.61,11.8,0.041,66.0,188.0,0.99794,3.0,0.54,9.3 +7.0,0.29,0.33,0.9,0.041,20.0,117.0,0.99048,3.21,0.5,11.4 +6.4,0.37,0.2,5.6,0.117,61.0,183.0,0.99459,3.24,0.43,9.5 +6.4,0.38,0.2,5.3,0.117,57.0,181.0,0.99459,3.24,0.43,9.5 +6.4,0.36,0.2,5.7,0.118,61.0,172.0,0.9946,3.24,0.43,9.5 +6.6,0.3,0.25,8.0,0.036,21.0,124.0,0.99362,3.06,0.38,10.8 +6.6,0.3,0.25,8.0,0.036,21.0,124.0,0.99362,3.06,0.38,10.8 +6.5,0.21,0.51,17.6,0.045,34.0,125.0,0.99966,3.2,0.47,8.8 +6.6,0.3,0.25,8.0,0.036,21.0,124.0,0.99362,3.06,0.38,10.8 +7.6,0.31,0.27,8.8,0.021,57.0,156.0,0.99442,3.08,0.38,11.0 +5.8,0.58,0.0,1.5,0.02,33.0,96.0,0.98918,3.29,0.38,12.4 +6.5,0.26,0.39,1.4,0.02,12.0,66.0,0.99089,3.25,0.75,11.3 +8.7,0.3,0.34,4.8,0.018,23.0,127.0,0.99474,3.12,0.49,11.2 +6.4,0.29,0.32,2.4,0.014,34.0,89.0,0.99008,3.24,0.66,12.5 +6.7,0.13,0.32,3.7,0.017,32.0,99.0,0.99348,3.12,0.44,10.0 +6.8,0.19,0.33,4.9,0.047,42.0,130.0,0.99283,3.12,0.56,11.0 +6.0,0.25,0.4,5.7,0.052,56.0,152.0,0.99398,3.16,0.88,10.5 +6.0,0.25,0.4,5.7,0.052,56.0,152.0,0.99398,3.16,0.88,10.5 +6.8,0.19,0.33,4.9,0.047,42.0,130.0,0.99283,3.12,0.56,11.0 +6.4,0.24,0.23,2.0,0.046,30.0,133.0,0.9908,3.12,0.54,11.4 +5.9,0.18,0.28,5.1,0.039,50.0,139.0,0.99165,3.16,0.44,11.3 +7.2,0.33,0.22,4.5,0.031,10.0,73.0,0.99076,2.97,0.52,12.2 +6.4,0.29,0.24,3.2,0.037,31.0,95.0,0.98942,2.9,0.66,12.6 +7.3,0.31,0.25,6.65,0.032,30.0,138.0,0.99244,2.9,0.37,11.1 +7.0,0.29,0.37,1.6,0.035,34.0,126.0,0.99058,3.26,0.47,12.3 +6.9,0.19,0.6,4.0,0.037,6.0,122.0,0.99255,2.92,0.59,10.4 +6.3,0.32,0.17,17.75,0.06,51.0,190.0,0.99916,3.13,0.48,8.8 +6.6,0.085,0.33,1.4,0.036,17.0,109.0,0.99306,3.27,0.61,9.5 +6.3,0.32,0.17,17.75,0.06,51.0,190.0,0.99916,3.13,0.48,8.8 +6.8,0.18,0.32,7.2,0.047,17.0,109.0,0.99498,3.42,0.44,10.4 +6.8,0.52,0.26,5.7,0.038,27.0,130.0,0.99,3.11,0.27,13.0 +7.1,0.28,0.28,8.5,0.03,25.0,191.0,0.99338,3.16,0.46,12.2 +5.7,0.15,0.47,11.4,0.035,49.0,128.0,0.99456,3.03,0.34,10.5 +5.8,0.275,0.3,5.4,0.043,41.0,149.0,0.9926,3.33,0.42,10.8 +5.4,0.53,0.16,2.7,0.036,34.0,128.0,0.98856,3.2,0.53,13.2 +5.8,0.32,0.28,4.3,0.032,46.0,115.0,0.98946,3.16,0.57,13.0 +6.7,0.22,0.39,1.2,0.049,26.0,152.0,0.99346,3.5,0.47,10.0 +6.1,0.6,0.12,1.8,0.05,11.0,76.0,0.99268,3.42,0.48,10.4 +6.5,0.26,0.31,1.3,0.034,59.0,145.0,0.98944,3.16,0.54,12.4 +5.0,0.29,0.54,5.7,0.035,54.0,155.0,0.98976,3.27,0.34,12.9 +5.4,0.53,0.16,2.7,0.036,34.0,128.0,0.98856,3.2,0.53,13.2 +6.8,0.21,0.26,11.7,0.038,61.0,152.0,0.99523,3.02,0.56,10.5 +5.8,0.32,0.28,4.3,0.032,46.0,115.0,0.98946,3.16,0.57,13.0 
+6.5,0.27,0.26,11.0,0.03,2.0,82.0,0.99402,3.07,0.36,11.2 +5.9,0.37,0.32,1.6,0.029,41.0,102.0,0.98916,3.41,0.55,12.7 +6.2,0.21,0.18,11.6,0.044,61.0,155.0,0.99655,3.14,0.52,9.4 +6.8,0.3,0.29,6.2,0.025,29.0,95.0,0.99071,3.03,0.32,12.9 +7.3,0.41,0.29,1.8,0.032,26.0,74.0,0.98889,2.96,0.35,13.0 +5.4,0.3,0.3,1.2,0.029,25.0,93.0,0.98742,3.31,0.4,13.6 +6.6,0.34,0.2,1.0,0.053,26.0,112.0,0.99336,3.32,0.55,9.1 +5.6,0.25,0.19,2.4,0.049,42.0,166.0,0.992,3.25,0.43,10.4 +5.3,0.3,0.3,1.2,0.029,25.0,93.0,0.98742,3.31,0.4,13.6 +6.9,0.58,0.58,8.2,0.032,29.0,169.0,0.99275,3.28,0.44,12.2 +7.2,0.23,0.25,18.8,0.085,19.0,111.0,1.00044,3.1,0.51,8.7 +7.1,0.2,0.27,9.6,0.037,19.0,105.0,0.99444,3.04,0.37,10.5 +6.8,0.15,0.41,12.9,0.044,79.5,183.0,0.99742,3.24,0.78,10.2 +7.0,0.22,0.26,9.2,0.027,37.0,122.0,0.99228,3.06,0.34,12.5 +6.4,0.16,0.44,1.2,0.051,39.0,122.0,0.99058,3.11,0.75,11.3 +6.8,0.15,0.41,12.9,0.044,79.5,183.0,0.99742,3.24,0.78,10.2 +6.8,0.31,0.3,8.0,0.028,33.0,122.0,0.99164,3.13,0.63,12.6 +6.8,0.15,0.41,12.9,0.044,79.5,183.0,0.99742,3.24,0.78,10.2 +7.6,0.3,0.37,1.6,0.087,27.0,177.0,0.99438,3.09,0.5,9.8 +6.0,0.16,0.27,12.0,0.03,39.0,98.0,0.99402,3.15,0.34,10.8 +7.1,0.21,0.35,2.5,0.04,41.0,186.0,0.99128,3.32,0.56,12.5 +7.0,0.22,0.26,9.2,0.027,37.0,122.0,0.99228,3.06,0.34,12.5 +5.6,0.21,0.24,4.4,0.027,37.0,150.0,0.991,3.3,0.31,11.5 +7.4,0.22,0.26,8.8,0.027,23.0,112.0,0.9931,2.98,0.41,11.4 +7.1,0.2,0.27,9.6,0.037,19.0,105.0,0.99444,3.04,0.37,10.5 +6.8,0.31,0.3,8.0,0.028,33.0,122.0,0.99164,3.13,0.63,12.6 +7.2,0.23,0.25,18.8,0.085,19.0,111.0,1.00044,3.1,0.51,8.7 +6.4,0.15,0.4,1.3,0.053,61.0,146.0,0.99112,3.17,0.68,11.0 +6.4,0.16,0.44,1.2,0.051,39.0,122.0,0.99058,3.11,0.75,11.3 +6.8,0.15,0.41,12.9,0.044,79.5,182.0,0.99742,3.24,0.78,10.2 +6.3,0.22,0.34,1.2,0.036,32.0,96.0,0.98961,3.06,0.74,11.6 +7.6,0.3,0.37,1.6,0.087,27.0,177.0,0.99438,3.09,0.5,9.8 +7.0,0.3,0.27,1.5,0.076,24.0,145.0,0.99344,3.1,0.52,10.1 +6.6,0.26,0.22,18.15,0.05,23.0,139.0,0.99904,3.06,0.5,9.2 +7.5,0.24,0.31,13.1,0.05,26.0,180.0,0.99884,3.05,0.53,9.1 +7.5,0.24,0.31,13.1,0.05,26.0,180.0,0.99884,3.05,0.53,9.1 +7.5,0.24,0.31,13.1,0.05,26.0,180.0,0.99884,3.05,0.53,9.1 +7.5,0.24,0.31,13.1,0.05,26.0,180.0,0.99884,3.05,0.53,9.1 +6.6,0.15,0.34,1.0,0.037,45.0,79.0,0.98949,2.96,0.5,11.7 +6.7,0.34,0.43,1.6,0.041,29.0,114.0,0.99014,3.23,0.44,12.6 +7.7,0.35,0.46,11.8,0.088,61.0,183.0,0.99786,2.86,0.47,9.0 +6.7,0.31,0.09,1.4,0.039,53.0,141.0,0.99206,3.12,0.44,10.1 +4.7,0.67,0.09,1.0,0.02,5.0,9.0,0.98722,3.3,0.34,13.6 +7.5,0.24,0.31,13.1,0.05,26.0,180.0,0.99884,3.05,0.53,9.1 +6.3,0.2,0.18,10.6,0.045,57.0,159.0,0.99666,3.09,0.54,9.2 +6.6,0.28,0.23,10.4,0.049,45.0,190.0,0.99754,3.12,0.51,8.8 +8.5,0.18,0.3,1.1,0.028,34.0,95.0,0.99272,2.83,0.36,10.0 +6.5,0.35,0.38,7.4,0.036,20.0,196.0,0.99712,3.47,0.48,9.1 +6.8,0.22,0.26,1.2,0.041,29.0,182.0,0.99104,3.04,0.35,11.2 +6.3,0.18,0.24,3.4,0.053,20.0,119.0,0.99373,3.11,0.52,9.2 +6.6,0.26,0.22,18.15,0.05,23.0,139.0,0.99904,3.06,0.5,9.2 +6.6,0.3,0.45,8.0,0.038,54.0,200.0,0.9956,3.18,0.48,9.5 +6.3,0.34,0.27,2.5,0.024,40.0,152.0,0.99095,3.35,0.6,11.9 +7.7,0.3,0.23,2.0,0.068,28.0,138.0,0.99382,3.11,0.62,9.8 +7.7,0.31,0.23,2.0,0.069,29.0,134.0,0.99382,3.11,0.62,9.8 +5.7,0.265,0.28,6.9,0.036,46.0,150.0,0.99299,3.36,0.44,10.8 +5.4,0.255,0.33,1.2,0.051,29.0,122.0,0.99048,3.37,0.66,11.3 +6.6,0.26,0.28,9.4,0.028,13.0,121.0,0.99254,3.17,0.34,12.1 +4.8,0.17,0.28,2.9,0.03,22.0,111.0,0.9902,3.38,0.34,11.3 +5.7,0.265,0.28,6.9,0.036,46.0,150.0,0.99299,3.36,0.44,10.8 +6.2,0.2,0.33,5.4,0.028,21.0,75.0,0.99012,3.36,0.41,13.5 
+7.5,0.28,0.41,1.3,0.044,11.0,126.0,0.99293,3.28,0.45,10.3 +6.2,0.22,0.2,20.8,0.035,58.0,184.0,1.00022,3.11,0.53,9.0 +7.0,0.34,0.26,10.3,0.041,51.0,166.0,0.99382,3.08,0.35,11.6 +7.5,0.28,0.41,1.3,0.044,11.0,126.0,0.99293,3.28,0.45,10.3 +6.5,0.19,0.34,1.6,0.029,39.0,116.0,0.98954,3.21,0.68,12.5 +6.0,0.21,0.29,13.1,0.042,28.0,125.0,0.99936,3.39,0.45,8.6 +6.1,0.22,0.46,1.8,0.16,34.0,74.0,0.9884,3.19,0.33,13.4 +6.5,0.32,0.48,8.0,0.026,18.0,88.0,0.99144,3.22,0.79,12.7 +7.1,0.21,0.72,1.6,0.167,65.0,120.0,0.99324,2.97,0.51,9.2 +5.6,0.26,0.18,1.4,0.034,18.0,135.0,0.99174,3.32,0.35,10.2 +7.0,0.15,0.28,14.7,0.051,29.0,149.0,0.99792,2.96,0.39,9.0 +7.0,0.15,0.28,14.7,0.051,29.0,149.0,0.99792,2.96,0.39,9.0 +7.0,0.15,0.28,14.7,0.051,29.0,149.0,0.99792,2.96,0.39,9.0 +7.0,0.15,0.28,14.7,0.051,29.0,149.0,0.99792,2.96,0.39,9.0 +7.0,0.15,0.28,14.7,0.051,29.0,149.0,0.99792,2.96,0.39,9.0 +7.0,0.15,0.28,14.7,0.051,29.0,149.0,0.99792,2.96,0.39,9.0 +7.0,0.15,0.28,14.7,0.051,29.0,149.0,0.99792,2.96,0.39,9.0 +7.4,0.27,0.28,1.8,0.04,45.0,121.0,0.99043,3.02,0.4,11.9 +6.8,0.22,0.3,10.6,0.07,67.0,194.0,0.99654,2.89,0.42,9.0 +6.2,0.24,0.25,12.5,0.055,47.0,134.0,0.99758,3.3,0.51,9.0 +6.3,0.28,0.29,6.8,0.051,40.0,143.0,0.99374,3.43,0.59,11.0 +7.0,0.15,0.28,14.7,0.051,29.0,149.0,0.99792,2.96,0.39,9.0 +5.5,0.17,0.23,2.9,0.039,10.0,108.0,0.99243,3.28,0.5,10.0 +6.5,0.26,0.34,1.4,0.04,25.0,184.0,0.99216,3.29,0.46,10.7 +6.6,0.27,0.33,1.4,0.042,24.0,183.0,0.99215,3.29,0.46,10.7 +5.4,0.46,0.15,2.1,0.026,29.0,130.0,0.98953,3.39,0.77,13.4 +7.8,0.19,0.32,7.4,0.015,47.0,124.0,0.99278,2.99,0.39,11.0 +5.5,0.17,0.23,2.9,0.039,10.0,108.0,0.99243,3.28,0.5,10.0 +6.5,0.26,0.34,1.4,0.04,25.0,184.0,0.99216,3.29,0.46,10.7 +6.6,0.27,0.33,1.4,0.042,24.0,183.0,0.99215,3.29,0.46,10.7 +7.8,0.19,0.32,7.4,0.015,47.0,124.0,0.99278,2.99,0.39,11.0 +7.8,0.2,0.32,5.0,0.016,31.0,101.0,0.99186,2.99,0.39,11.0 +6.1,0.17,0.28,2.5,0.028,22.0,98.0,0.99072,3.16,0.37,11.1 +7.4,0.2,0.35,6.1,0.025,10.0,40.0,0.99244,2.79,0.52,10.9 +6.7,0.39,0.24,2.7,0.017,22.0,80.0,0.99084,3.03,0.37,11.5 +5.4,0.46,0.15,2.1,0.026,29.0,130.0,0.98953,3.39,0.77,13.4 +6.9,0.4,0.17,12.9,0.033,59.0,186.0,0.99754,3.08,0.49,9.4 +6.9,0.4,0.17,12.9,0.033,59.0,186.0,0.99754,3.08,0.49,9.4 +6.9,0.4,0.17,12.9,0.033,59.0,186.0,0.99754,3.08,0.49,9.4 +6.3,0.24,0.29,13.7,0.035,53.0,134.0,0.99567,3.17,0.38,10.6 +6.9,0.4,0.17,12.9,0.033,59.0,186.0,0.99754,3.08,0.49,9.4 +7.4,0.27,0.31,2.4,0.014,15.0,143.0,0.99094,3.03,0.65,12.0 +6.1,0.27,0.28,9.8,0.042,61.0,125.0,0.99532,3.14,0.42,10.2 +6.3,0.24,0.29,13.7,0.035,53.0,134.0,0.99567,3.17,0.38,10.6 +5.0,0.61,0.12,1.3,0.009,65.0,100.0,0.9874,3.26,0.37,13.5 +6.7,0.42,0.39,12.1,0.04,61.0,248.0,0.99794,3.31,0.58,9.7 +6.5,0.33,0.28,6.1,0.018,41.0,103.0,0.99122,3.24,0.32,12.2 +6.9,0.33,0.31,7.7,0.04,29.0,135.0,0.99226,3.11,0.57,12.3 +6.5,0.33,0.28,6.1,0.018,41.0,103.0,0.99122,3.24,0.32,12.2 +6.3,0.15,0.3,1.4,0.022,38.0,100.0,0.99099,3.42,0.57,11.4 +6.5,0.32,0.45,7.7,0.022,31.0,97.0,0.99134,3.2,0.7,12.7 +6.7,0.42,0.39,12.1,0.04,61.0,248.0,0.99794,3.31,0.58,9.7 +7.4,0.25,0.29,6.8,0.02,31.0,113.0,0.99338,3.13,0.29,10.8 +7.6,0.27,0.3,9.2,0.018,23.0,96.0,0.9938,3.08,0.29,11.0 +6.4,0.27,0.45,8.3,0.05,52.0,196.0,0.9955,3.18,0.48,9.5 +6.5,0.25,0.27,17.4,0.064,29.0,140.0,0.99776,3.2,0.49,10.1 +5.6,0.19,0.31,2.7,0.027,11.0,100.0,0.98964,3.46,0.4,13.2 +7.4,0.29,0.48,12.8,0.037,61.5,182.0,0.99808,3.02,0.34,8.8 +6.4,0.34,0.44,8.2,0.043,54.0,201.0,0.99551,3.18,0.48,9.5 +6.6,0.27,0.52,8.1,0.044,53.0,202.0,0.99548,3.18,0.48,9.5 
+6.6,0.26,0.52,8.2,0.047,52.0,191.0,0.99541,3.16,0.47,9.5 +6.4,0.27,0.45,8.3,0.05,52.0,196.0,0.9955,3.18,0.48,9.5 +6.5,0.26,0.5,8.0,0.051,46.0,197.0,0.99536,3.18,0.47,9.5 +6.8,0.25,0.3,11.8,0.043,53.0,133.0,0.99524,3.03,0.58,10.4 +6.3,0.32,0.26,12.0,0.049,63.0,170.0,0.9961,3.14,0.55,9.9 +5.5,0.24,0.45,1.7,0.046,22.0,113.0,0.99224,3.22,0.48,10.0 +6.5,0.25,0.27,17.4,0.064,29.0,140.0,0.99776,3.2,0.49,10.1 +6.6,0.13,0.29,13.9,0.056,33.0,95.0,0.99702,3.17,0.39,9.4 +7.0,0.39,0.21,10.7,0.098,13.0,91.0,0.99657,3.03,0.47,9.3 +7.9,0.21,0.39,2.0,0.057,21.0,138.0,0.99176,3.05,0.52,10.9 +7.0,0.3,0.28,2.2,0.042,21.0,177.0,0.99166,3.2,0.57,11.4 +8.1,0.2,0.3,1.3,0.036,7.0,49.0,0.99242,2.99,0.73,10.3 +8.3,0.18,0.3,1.1,0.033,20.0,57.0,0.99109,3.02,0.51,11.0 +7.9,0.21,0.39,2.0,0.057,21.0,138.0,0.99176,3.05,0.52,10.9 +7.2,0.17,0.34,6.4,0.042,16.0,111.0,0.99278,2.99,0.4,10.8 +8.1,0.2,0.3,1.3,0.036,7.0,49.0,0.99242,2.99,0.73,10.3 +8.3,0.18,0.3,1.1,0.033,20.0,57.0,0.99109,3.02,0.51,11.0 +7.0,0.39,0.21,10.7,0.098,13.0,91.0,0.99657,3.03,0.47,9.3 +6.8,0.21,0.62,6.4,0.041,7.0,113.0,0.99358,2.96,0.59,10.2 +6.9,0.21,0.62,6.3,0.042,7.0,109.0,0.99358,2.96,0.59,10.2 +7.2,0.17,0.34,6.4,0.042,16.0,111.0,0.99278,2.99,0.4,10.8 +6.8,0.26,0.34,15.1,0.06,42.0,162.0,0.99705,3.24,0.52,10.5 +7.2,0.28,0.38,2.0,0.052,23.0,156.0,0.9912,3.13,0.52,11.1 +7.9,0.21,0.39,2.0,0.057,21.0,138.0,0.99176,3.05,0.52,10.9 +7.0,0.3,0.28,2.2,0.042,21.0,177.0,0.99166,3.2,0.57,11.4 +7.4,0.34,0.28,12.1,0.049,31.0,149.0,0.99677,3.22,0.49,10.3 +6.3,0.43,0.32,8.8,0.042,18.0,106.0,0.99172,3.28,0.33,12.9 +6.8,0.41,0.3,8.8,0.045,28.0,131.0,0.9953,3.12,0.59,9.9 +6.3,0.4,0.24,5.1,0.036,43.0,131.0,0.99186,3.24,0.44,11.3 +5.1,0.35,0.26,6.8,0.034,36.0,120.0,0.99188,3.38,0.4,11.5 +5.1,0.35,0.26,6.8,0.034,36.0,120.0,0.99188,3.38,0.4,11.5 +6.3,0.3,0.2,3.7,0.039,34.0,132.0,0.99158,3.0,0.38,10.7 +6.9,0.28,0.28,12.2,0.042,52.0,139.0,0.99522,3.03,0.56,10.4 +7.0,0.33,0.28,5.7,0.033,39.0,204.0,0.99176,3.17,0.64,12.5 +6.7,0.26,0.49,8.1,0.052,48.0,197.0,0.99558,3.19,0.48,9.5 +7.3,0.24,0.3,2.5,0.042,31.0,104.0,0.9911,3.05,0.56,11.3 +6.7,0.46,0.21,4.0,0.034,12.0,88.0,0.99016,3.26,0.54,13.0 +5.1,0.35,0.26,6.8,0.034,36.0,120.0,0.99188,3.38,0.4,11.5 +5.1,0.23,0.18,1.0,0.053,13.0,99.0,0.98956,3.22,0.39,11.5 +6.3,0.4,0.24,5.1,0.036,43.0,131.0,0.99186,3.24,0.44,11.3 +7.1,0.44,0.23,5.8,0.035,24.0,100.0,0.99062,3.15,0.57,13.2 +4.8,0.26,0.23,10.6,0.034,23.0,111.0,0.99274,3.46,0.28,11.5 +6.8,0.31,0.19,3.5,0.086,30.0,130.0,0.993,2.83,0.44,9.6 +6.8,0.31,0.19,3.5,0.086,30.0,130.0,0.993,2.83,0.44,9.6 +7.0,0.15,0.29,16.4,0.058,45.0,110.0,0.9978,3.15,0.37,9.7 +6.5,0.41,0.22,4.8,0.052,49.0,142.0,0.9946,3.14,0.62,9.2 +6.2,0.31,0.23,3.3,0.052,34.0,113.0,0.99429,3.16,0.48,8.4 +8.0,0.27,0.33,1.2,0.05,41.0,103.0,0.99002,3.0,0.45,12.4 +8.0,0.27,0.33,1.2,0.05,41.0,103.0,0.99002,3.0,0.45,12.4 +6.5,0.41,0.22,4.8,0.052,49.0,142.0,0.9946,3.14,0.62,9.2 +6.2,0.31,0.23,3.3,0.052,34.0,113.0,0.99429,3.16,0.48,8.4 +6.7,0.37,0.25,2.5,0.028,24.0,84.0,0.9909,3.14,0.36,11.7 +6.6,0.21,0.5,8.7,0.036,41.0,191.0,0.99294,2.96,0.56,11.0 +7.5,0.26,0.31,1.6,0.032,36.0,109.0,0.99044,2.97,0.43,11.9 +7.5,0.34,0.28,4.0,0.028,46.0,100.0,0.98958,3.2,0.5,13.2 +6.7,0.37,0.25,2.5,0.028,24.0,84.0,0.9909,3.14,0.36,11.7 +6.4,0.32,0.23,16.2,0.055,36.0,176.0,0.9986,3.26,0.54,9.1 +6.7,0.24,0.32,9.0,0.023,20.0,109.0,0.99262,3.34,0.35,12.6 +6.4,0.32,0.23,16.2,0.055,36.0,176.0,0.9986,3.26,0.54,9.1 +7.1,0.39,0.79,1.4,0.194,23.0,90.0,0.99212,3.17,0.46,10.5 +8.2,0.31,0.43,7.0,0.047,18.0,87.0,0.99628,3.23,0.64,10.6 
+6.7,0.24,0.32,9.0,0.023,20.0,109.0,0.99262,3.34,0.35,12.6 +5.9,0.17,0.29,3.1,0.03,32.0,123.0,0.98913,3.41,0.33,13.7 +5.9,0.2,0.23,1.5,0.037,38.0,93.0,0.99021,3.36,0.49,12.0 +6.6,0.32,0.26,4.6,0.031,26.0,120.0,0.99198,3.4,0.73,12.5 +5.9,0.12,0.27,4.8,0.03,40.0,110.0,0.99226,3.55,0.68,12.1 +5.9,0.18,0.29,4.6,0.032,68.0,137.0,0.99159,3.21,0.38,11.3 +5.9,0.2,0.23,1.5,0.037,38.0,93.0,0.99021,3.36,0.49,12.0 +5.4,0.17,0.27,2.7,0.049,28.0,104.0,0.99224,3.46,0.55,10.3 +6.1,0.21,0.3,6.3,0.039,47.0,136.0,0.99068,3.27,0.31,12.7 +7.3,0.25,0.26,7.2,0.048,52.0,207.0,0.99587,3.12,0.37,9.2 +7.3,0.25,0.26,7.2,0.048,52.0,207.0,0.99587,3.12,0.37,9.2 +6.2,0.22,0.3,12.4,0.054,108.0,152.0,0.99728,3.1,0.47,9.5 +6.5,0.27,0.19,6.6,0.045,98.0,175.0,0.99364,3.16,0.34,10.1 +6.5,0.27,0.19,6.6,0.045,98.0,175.0,0.99364,3.16,0.34,10.1 +6.6,0.39,0.22,4.0,0.038,17.0,98.0,0.99018,3.25,0.53,13.0 +6.0,0.31,0.38,4.8,0.04,41.0,101.0,0.98968,3.24,0.56,13.1 +8.4,0.23,0.32,1.3,0.048,59.0,113.0,0.99178,3.1,0.55,11.0 +7.3,0.25,0.26,7.2,0.048,52.0,207.0,0.99587,3.12,0.37,9.2 +6.0,0.22,0.25,11.1,0.056,112.0,177.0,0.9961,3.08,0.36,9.4 +6.2,0.22,0.3,12.4,0.054,108.0,152.0,0.99728,3.1,0.47,9.5 +6.1,0.23,0.27,9.8,0.055,74.0,134.0,0.99534,3.16,0.4,10.2 +6.5,0.27,0.19,6.6,0.045,98.0,175.0,0.99364,3.16,0.34,10.1 +7.3,0.36,0.54,13.3,0.054,63.0,193.0,0.99864,3.06,0.49,8.6 +7.6,0.37,0.51,11.7,0.094,58.0,181.0,0.99776,2.91,0.51,9.0 +6.7,0.26,0.51,8.0,0.062,50.0,194.0,0.99545,3.13,0.5,9.6 +7.4,0.22,0.27,1.6,0.057,45.0,98.0,0.99299,3.29,0.44,9.9 +6.1,0.22,0.28,16.55,0.059,54.0,135.0,0.99665,3.2,0.38,10.5 +7.1,0.28,0.31,1.5,0.053,20.0,98.0,0.99069,3.15,0.5,11.4 +6.5,0.35,0.31,10.2,0.069,58.0,170.0,0.99692,3.18,0.49,9.4 +6.8,0.73,0.2,6.6,0.054,25.0,65.0,0.99324,3.12,0.28,11.1 +6.0,0.28,0.24,17.8,0.047,42.0,111.0,0.99896,3.1,0.45,8.9 +6.0,0.28,0.24,17.8,0.047,42.0,111.0,0.99896,3.1,0.45,8.9 +7.1,0.2,0.37,1.5,0.049,28.0,129.0,0.99226,3.15,0.52,10.8 +6.8,0.33,0.31,7.4,0.045,34.0,143.0,0.99226,3.06,0.55,12.2 +6.0,0.28,0.24,17.8,0.047,42.0,111.0,0.99896,3.1,0.45,8.9 +7.2,0.24,0.36,2.0,0.029,21.0,63.0,0.99076,3.13,0.63,12.5 +6.8,0.33,0.31,7.4,0.045,34.0,143.0,0.99226,3.06,0.55,12.2 +7.2,0.24,0.36,2.0,0.029,21.0,63.0,0.99076,3.13,0.63,12.5 +6.0,0.28,0.24,17.8,0.047,42.0,111.0,0.99896,3.1,0.45,8.9 +6.2,0.27,0.26,12.1,0.046,43.0,127.0,0.9951,3.16,0.37,10.8 +6.4,0.38,0.26,8.2,0.043,28.0,98.0,0.99234,2.99,0.31,11.4 +7.1,0.2,0.37,1.5,0.049,28.0,129.0,0.99226,3.15,0.52,10.8 +6.0,0.21,0.3,8.7,0.036,47.0,127.0,0.99368,3.18,0.39,10.6 +7.0,0.34,0.1,3.5,0.044,17.0,63.0,0.9937,3.01,0.39,9.2 +5.9,0.435,0.16,6.4,0.031,21.0,134.0,0.99151,3.24,0.46,12.2 +7.0,0.25,0.33,2.1,0.021,17.0,76.0,0.99021,3.26,0.45,12.3 +6.7,0.26,0.29,7.7,0.038,40.0,179.0,0.99479,3.23,0.56,10.4 +7.0,0.24,0.3,12.3,0.035,72.0,172.0,0.9954,2.99,0.57,10.4 +8.5,0.23,0.34,1.3,0.035,54.0,110.0,0.99176,3.07,0.55,11.0 +6.0,0.21,0.3,8.7,0.036,47.0,127.0,0.99368,3.18,0.39,10.6 +7.0,0.34,0.1,3.5,0.044,17.0,63.0,0.9937,3.01,0.39,9.2 +4.8,0.65,0.12,1.1,0.013,4.0,10.0,0.99246,3.32,0.36,13.5 +6.1,0.22,0.38,2.8,0.144,12.0,65.0,0.9908,2.95,0.64,11.4 +5.8,0.27,0.26,3.5,0.071,26.0,69.0,0.98994,3.1,0.38,11.5 +5.0,0.455,0.18,1.9,0.036,33.0,106.0,0.98746,3.21,0.83,14.0 +6.5,0.33,0.3,3.8,0.036,34.0,88.0,0.99028,3.25,0.63,12.5 +6.5,0.33,0.3,3.8,0.036,34.0,88.0,0.99028,3.25,0.63,12.5 +6.7,0.31,0.3,2.4,0.038,30.0,83.0,0.98867,3.09,0.36,12.8 +6.2,0.39,0.24,4.8,0.037,45.0,138.0,0.99174,3.23,0.43,11.2 +6.2,0.39,0.24,4.8,0.037,45.0,138.0,0.99174,3.23,0.43,11.2 +7.1,0.37,0.3,6.2,0.04,49.0,139.0,0.99021,3.17,0.27,13.6 
+7.2,0.23,0.82,1.3,0.149,70.0,109.0,0.99304,2.93,0.42,9.2 +6.5,0.33,0.3,3.8,0.036,34.0,88.0,0.99028,3.25,0.63,12.5 +7.2,0.25,0.32,1.5,0.054,24.0,105.0,0.99154,3.17,0.48,11.1 +6.2,0.39,0.24,4.8,0.037,45.0,138.0,0.99174,3.23,0.43,11.2 +4.7,0.455,0.18,1.9,0.036,33.0,106.0,0.98746,3.21,0.83,14.0 +7.1,0.37,0.3,6.2,0.04,49.0,139.0,0.99021,3.17,0.27,13.6 +6.2,0.28,0.51,7.9,0.056,49.0,206.0,0.9956,3.18,0.52,9.4 +6.4,0.35,0.28,1.6,0.037,31.0,113.0,0.98779,3.12,0.4,14.2 +6.6,0.31,0.28,1.4,0.035,28.0,107.0,0.98836,3.0,0.4,13.2 +7.4,0.25,0.37,2.6,0.05,24.0,132.0,0.99138,3.04,0.53,11.2 +7.3,0.36,0.34,14.8,0.057,46.0,173.0,0.99751,3.14,0.57,10.2 +6.7,0.31,0.3,2.4,0.038,30.0,83.0,0.98867,3.09,0.36,12.8 +8.6,0.31,0.3,0.9,0.045,16.0,109.0,0.99249,2.95,0.39,10.1 +8.6,0.31,0.3,0.9,0.045,16.0,109.0,0.99249,2.95,0.39,10.1 +8.6,0.22,0.33,1.2,0.031,38.0,95.0,0.99239,2.83,0.31,10.3 +6.9,0.14,0.29,9.9,0.056,30.0,91.0,0.99512,3.19,0.33,9.9 +6.5,0.22,0.31,3.9,0.046,17.0,106.0,0.99098,3.15,0.31,11.5 +6.6,0.32,0.47,15.6,0.063,27.0,173.0,0.99872,3.18,0.56,9.0 +6.6,0.32,0.47,15.6,0.063,27.0,173.0,0.99872,3.18,0.56,9.0 +6.1,0.28,0.26,1.5,0.03,25.0,101.0,0.98894,3.03,0.41,12.1 +6.2,0.3,0.28,1.6,0.036,28.0,106.0,0.988245,3.14,0.41,13.3 +6.9,0.22,0.28,7.8,0.05,43.0,116.0,0.99326,3.22,0.6,11.5 +8.7,0.31,0.21,5.6,0.039,28.0,67.0,0.99328,2.96,0.52,11.0 +7.3,0.27,0.3,1.3,0.04,26.0,84.0,0.99222,3.28,0.53,10.7 +7.0,0.46,0.2,16.7,0.046,50.0,184.0,0.99898,3.08,0.56,9.4 +5.7,0.23,0.25,7.95,0.042,16.0,108.0,0.99486,3.44,0.61,10.3 +6.5,0.36,0.36,6.7,0.185,51.5,151.0,0.99528,3.17,0.42,9.3 +8.2,0.18,0.38,1.1,0.04,41.0,92.0,0.99062,2.88,0.6,12.0 +6.2,0.27,0.32,6.3,0.048,47.0,159.0,0.99282,3.21,0.6,11.0 +6.9,0.4,0.37,8.9,0.053,36.0,148.0,0.996,3.16,0.5,9.3 +4.9,0.345,0.34,1.0,0.068,32.0,143.0,0.99138,3.24,0.4,10.1 +7.2,0.23,0.39,1.5,0.053,26.0,106.0,0.99166,3.18,0.47,11.1 +6.4,0.2,0.15,6.6,0.046,26.0,113.0,0.99408,2.99,0.58,9.9 +6.1,0.27,0.32,6.2,0.048,47.0,161.0,0.99281,3.22,0.6,11.0 +6.2,0.27,0.32,6.3,0.048,47.0,159.0,0.99282,3.21,0.6,11.0 +6.0,0.3,0.33,2.1,0.042,31.0,127.0,0.98964,3.32,0.42,12.5 +6.1,0.3,0.32,2.2,0.042,41.0,142.0,0.98952,3.31,0.44,12.7 +5.7,0.14,0.3,5.4,0.045,26.0,105.0,0.99469,3.32,0.45,9.3 +6.9,0.4,0.37,8.9,0.053,36.0,148.0,0.996,3.16,0.5,9.3 +4.9,0.345,0.34,1.0,0.068,32.0,143.0,0.99138,3.24,0.4,10.1 +6.3,0.33,0.2,17.9,0.066,36.0,161.0,0.9991,3.14,0.51,8.8 +7.0,0.16,0.3,2.6,0.043,34.0,90.0,0.99047,2.88,0.47,11.2 +8.4,0.22,0.3,1.3,0.038,45.0,122.0,0.99178,3.13,0.54,10.8 +6.3,0.33,0.2,17.9,0.066,36.0,161.0,0.9991,3.14,0.51,8.8 +7.0,0.16,0.3,2.6,0.043,34.0,90.0,0.99047,2.88,0.47,11.2 +5.4,0.24,0.18,2.3,0.05,22.0,145.0,0.99207,3.24,0.46,10.3 +7.7,0.31,0.36,4.3,0.026,15.0,87.0,0.99152,3.11,0.48,12.0 +5.6,0.185,0.19,7.1,0.048,36.0,110.0,0.99438,3.26,0.41,9.5 +5.6,0.185,0.19,7.1,0.048,36.0,110.0,0.99438,3.26,0.41,9.5 +6.6,0.43,0.24,11.9,0.04,54.0,159.0,0.99622,3.14,0.54,9.8 +7.6,0.39,0.46,11.7,0.084,55.0,170.0,0.99773,2.91,0.51,9.0 +7.2,0.58,0.27,5.8,0.032,40.0,118.0,0.99088,3.17,0.53,13.0 +6.0,0.34,0.32,3.8,0.044,13.0,116.0,0.99108,3.39,0.44,11.8 +7.5,0.35,0.48,12.4,0.056,61.0,176.5,0.99803,2.97,0.52,8.8 +7.3,0.38,0.23,6.5,0.05,18.0,102.0,0.99304,3.1,0.55,11.2 +5.4,0.185,0.19,7.1,0.048,36.0,110.0,0.99438,3.26,0.41,9.5 +6.3,0.27,0.51,7.6,0.049,35.0,200.0,0.99548,3.16,0.54,9.4 +6.5,0.29,0.52,7.9,0.049,35.0,192.0,0.99551,3.16,0.51,9.5 +6.4,0.17,0.3,2.8,0.034,33.0,125.0,0.99152,3.03,0.49,10.4 +6.7,0.18,0.31,10.6,0.035,42.0,143.0,0.99572,3.08,0.49,9.8 +6.4,0.17,0.3,2.8,0.034,33.0,125.0,0.99152,3.03,0.49,10.4 
+6.8,0.37,0.67,1.5,0.175,16.0,98.0,0.99244,3.06,0.56,10.3 +6.3,0.27,0.51,7.6,0.049,35.0,200.0,0.99548,3.16,0.54,9.4 +6.5,0.29,0.52,7.9,0.049,35.0,192.0,0.99551,3.16,0.51,9.5 +6.1,0.24,0.26,1.7,0.033,61.0,134.0,0.9903,3.19,0.81,11.9 +7.0,0.32,0.29,7.6,0.025,35.0,124.0,0.99162,3.15,0.65,12.8 +6.9,0.27,0.25,7.5,0.03,18.0,117.0,0.99116,3.09,0.38,13.0 +6.5,0.29,0.53,1.7,0.04,41.0,192.0,0.9922,3.26,0.59,10.4 +6.5,0.29,0.52,1.7,0.034,41.0,193.0,0.99223,3.25,0.59,10.4 +6.1,0.22,0.25,12.1,0.035,54.0,135.0,0.99481,3.21,0.4,10.7 +6.3,0.22,0.27,4.5,0.036,81.0,157.0,0.9928,3.05,0.76,10.7 +6.1,0.24,0.26,1.7,0.033,61.0,134.0,0.9903,3.19,0.81,11.9 +5.6,0.23,0.25,8.0,0.043,31.0,101.0,0.99429,3.19,0.42,10.4 +7.0,0.32,0.29,7.6,0.025,35.0,124.0,0.99162,3.15,0.65,12.8 +6.8,0.11,0.27,8.6,0.044,45.0,104.0,0.99454,3.2,0.37,9.9 +6.8,0.11,0.27,8.6,0.044,45.0,104.0,0.99454,3.2,0.37,9.9 +7.3,0.23,0.41,14.6,0.048,73.0,223.0,0.99863,3.16,0.71,9.4 +6.1,0.2,0.17,1.6,0.048,46.0,129.0,0.991,3.3,0.43,11.4 +6.8,0.11,0.27,8.6,0.044,45.0,104.0,0.99454,3.2,0.37,9.9 +7.3,0.23,0.41,14.6,0.048,73.0,223.0,0.99863,3.16,0.71,9.4 +6.9,0.2,0.41,1.1,0.06,36.0,104.0,0.99317,2.99,0.39,9.2 +6.7,0.19,0.32,3.7,0.041,26.0,76.0,0.99173,2.9,0.57,10.5 +6.7,0.28,0.34,8.9,0.048,32.0,111.0,0.99455,3.25,0.54,11.0 +6.7,0.28,0.34,8.9,0.048,32.0,111.0,0.99455,3.25,0.54,11.0 +8.0,0.37,0.31,4.7,0.038,3.0,127.0,0.99186,2.9,0.72,12.1 +6.7,0.28,0.34,8.9,0.048,32.0,111.0,0.99455,3.25,0.54,11.0 +6.0,0.26,0.29,3.1,0.041,37.0,144.0,0.98944,3.22,0.39,12.8 +6.4,0.24,0.49,5.8,0.053,25.0,120.0,0.9942,3.01,0.98,10.5 +6.4,0.24,0.49,5.8,0.053,25.0,120.0,0.9942,3.01,0.98,10.5 +6.4,0.24,0.49,5.8,0.053,25.0,120.0,0.9942,3.01,0.98,10.5 +6.4,0.25,0.57,1.0,0.062,21.0,122.0,0.99238,3.0,0.4,9.5 +6.1,0.25,0.48,15.8,0.052,25.0,94.0,0.99782,3.07,0.45,9.2 +6.8,0.14,0.35,1.5,0.047,40.0,117.0,0.99111,3.07,0.72,11.1 +6.5,0.38,0.26,5.2,0.042,33.0,112.0,0.99067,3.06,0.5,12.3 +6.8,0.14,0.35,1.5,0.047,40.0,117.0,0.99111,3.07,0.72,11.1 +5.4,0.15,0.32,2.5,0.037,10.0,51.0,0.98878,3.04,0.58,12.6 +6.4,0.25,0.57,1.0,0.062,21.0,122.0,0.99238,3.0,0.4,9.5 +6.1,0.25,0.48,15.8,0.052,25.0,94.0,0.99782,3.07,0.45,9.2 +6.8,0.22,0.32,5.9,0.054,40.0,152.0,0.9938,3.2,0.57,10.8 +7.2,0.21,0.29,3.1,0.044,39.0,122.0,0.99143,3.0,0.6,11.3 +6.0,0.26,0.29,3.1,0.041,37.0,144.0,0.98944,3.22,0.39,12.8 +6.4,0.24,0.49,5.8,0.053,25.0,120.0,0.9942,3.01,0.98,10.5 +6.5,0.46,0.24,11.5,0.051,56.0,171.0,0.99588,3.08,0.56,9.8 +6.5,0.18,0.48,18.0,0.054,56.0,183.0,1.00038,2.98,0.61,8.5 +6.2,0.32,0.12,4.8,0.054,6.0,97.0,0.99424,3.16,0.5,9.3 +7.2,0.4,0.24,8.5,0.055,45.0,151.0,0.99626,3.2,0.52,9.2 +5.9,0.23,0.24,1.6,0.037,32.0,115.0,0.99076,3.21,0.51,11.4 +6.4,0.18,0.48,18.0,0.054,56.0,183.0,1.00038,2.98,0.61,8.5 +6.2,0.32,0.12,4.8,0.054,6.0,97.0,0.99424,3.16,0.5,9.3 +6.4,0.37,0.12,5.9,0.056,6.0,91.0,0.99536,3.06,0.46,8.4 +7.0,0.23,0.42,1.1,0.062,35.0,100.0,0.99318,3.04,0.4,9.2 +7.2,0.4,0.24,8.5,0.055,45.0,151.0,0.99626,3.2,0.52,9.2 +7.6,0.19,0.37,13.1,0.033,52.0,151.0,0.99726,3.18,0.79,10.4 +6.0,0.28,0.27,4.1,0.046,50.0,147.0,0.99126,3.27,0.56,11.6 +6.2,0.32,0.45,2.9,0.029,37.0,94.0,0.98998,3.25,0.6,12.4 +7.6,0.19,0.37,13.1,0.033,52.0,151.0,0.99726,3.18,0.79,10.4 +6.4,0.26,0.26,1.1,0.052,22.0,176.0,0.99304,3.09,0.54,9.3 +5.9,0.25,0.27,1.5,0.029,37.0,81.0,0.9892,3.2,0.46,12.2 +6.1,0.28,0.3,7.75,0.031,33.0,139.0,0.99296,3.22,0.46,11.0 +6.9,0.19,0.38,1.15,0.023,30.0,105.0,0.99047,3.11,0.38,11.4 +6.4,0.29,0.57,1.0,0.06,15.0,120.0,0.9924,3.06,0.41,9.5 +6.8,0.27,0.22,17.8,0.034,16.0,116.0,0.9989,3.07,0.53,9.2 
+7.5,0.26,0.38,5.7,0.021,23.0,125.0,0.99338,3.13,0.62,11.1 +6.8,0.27,0.22,17.8,0.034,16.0,116.0,0.9989,3.07,0.53,9.2 +6.4,0.2,0.22,7.4,0.032,53.0,172.0,0.99404,3.24,0.58,11.0 +7.3,0.33,0.22,1.4,0.041,40.0,177.0,0.99287,3.14,0.48,9.9 +7.3,0.34,0.22,1.4,0.044,43.0,176.0,0.99286,3.14,0.46,9.9 +6.4,0.29,0.57,1.0,0.06,15.0,120.0,0.9924,3.06,0.41,9.5 +6.1,1.1,0.16,4.4,0.033,8.0,109.0,0.99058,3.35,0.47,12.4 +6.3,0.24,0.29,1.6,0.052,48.0,185.0,0.9934,3.21,0.5,9.4 +6.2,0.24,0.22,7.9,0.053,45.0,149.0,0.99545,3.23,0.52,9.3 +7.4,0.16,0.27,15.5,0.05,25.0,135.0,0.9984,2.9,0.43,8.7 +7.4,0.16,0.27,15.5,0.05,25.0,135.0,0.9984,2.9,0.43,8.7 +7.4,0.16,0.27,15.5,0.05,25.0,135.0,0.9984,2.9,0.43,8.7 +7.4,0.16,0.27,15.5,0.05,25.0,135.0,0.9984,2.9,0.43,8.7 +7.2,0.17,0.28,17.55,0.05,33.0,154.0,0.99971,2.94,0.43,9.0 +6.9,0.19,0.35,13.5,0.038,49.0,118.0,0.99546,3.0,0.63,10.7 +6.9,0.19,0.35,13.5,0.038,49.0,118.0,0.99546,3.0,0.63,10.7 +6.8,0.16,0.36,1.3,0.034,32.0,98.0,0.99058,3.02,0.58,11.3 +7.4,0.16,0.27,15.5,0.05,25.0,135.0,0.9984,2.9,0.43,8.7 +6.8,0.3,0.27,11.6,0.028,22.0,97.0,0.99314,2.96,0.38,11.7 +6.2,0.24,0.22,7.9,0.053,45.0,149.0,0.99545,3.23,0.52,9.3 +7.4,0.16,0.27,15.5,0.05,25.0,135.0,0.9984,2.9,0.43,8.7 +7.2,0.17,0.28,17.55,0.05,33.0,154.0,0.99971,2.94,0.43,9.0 +6.8,0.3,0.27,11.6,0.028,22.0,97.0,0.99314,2.96,0.38,11.7 +6.5,0.43,0.18,13.15,0.032,25.0,131.0,0.99565,3.23,0.51,10.7 +6.6,0.17,0.36,1.9,0.036,38.0,110.0,0.99056,3.05,0.54,11.4 +6.9,0.19,0.35,13.5,0.038,49.0,118.0,0.99546,3.0,0.63,10.7 +6.8,0.16,0.36,1.3,0.034,32.0,98.0,0.99058,3.02,0.58,11.3 +6.4,0.41,0.01,6.1,0.048,20.0,70.0,0.99362,3.19,0.42,10.0 +6.4,0.41,0.01,6.1,0.048,20.0,70.0,0.99362,3.19,0.42,10.0 +7.4,0.36,0.32,1.9,0.036,27.0,119.0,0.99196,3.15,0.49,11.2 +6.1,0.17,0.21,1.9,0.09,44.0,130.0,0.99255,3.07,0.41,9.7 +5.5,0.28,0.21,1.6,0.032,23.0,85.0,0.99027,3.42,0.42,12.5 +6.6,0.5,0.26,11.3,0.029,32.0,110.0,0.99302,3.27,0.78,12.9 +7.1,0.44,0.27,8.4,0.057,60.0,160.0,0.99257,3.16,0.36,11.8 +6.9,0.38,0.28,8.3,0.062,22.0,166.0,0.99506,3.16,0.72,10.6 +7.1,0.44,0.27,8.4,0.057,60.0,160.0,0.99257,3.16,0.36,11.8 +6.2,0.24,0.28,12.2,0.049,54.0,133.0,0.9952,3.19,0.37,10.7 +6.1,0.28,0.27,8.0,0.048,41.0,162.0,0.99498,3.21,0.51,9.9 +7.6,0.26,0.32,1.3,0.048,23.0,76.0,0.9903,2.96,0.46,12.0 +7.5,0.16,0.38,12.7,0.043,70.5,163.0,0.99706,3.15,0.82,10.4 +6.5,0.36,0.16,1.3,0.054,11.0,107.0,0.99398,3.19,0.39,8.5 +6.6,0.35,0.19,10.5,0.06,15.0,82.0,0.99588,3.13,0.38,9.9 +5.7,0.25,0.26,12.5,0.049,52.5,120.0,0.99691,3.08,0.45,9.4 +7.4,0.37,0.26,9.6,0.05,33.0,134.0,0.99608,3.13,0.46,10.4 +5.7,0.25,0.21,1.5,0.044,21.0,108.0,0.99142,3.3,0.59,11.0 +5.8,0.23,0.21,1.5,0.044,21.0,110.0,0.99138,3.3,0.57,11.0 +5.4,0.265,0.28,7.8,0.052,27.0,91.0,0.99432,3.19,0.38,10.4 +5.7,0.25,0.27,10.8,0.05,58.0,116.0,0.99592,3.1,0.5,9.8 +5.7,0.25,0.26,12.5,0.049,52.5,106.0,0.99691,3.08,0.45,9.4 +5.9,0.23,0.28,8.6,0.046,37.0,142.0,0.99432,3.23,0.53,10.6 +6.2,0.3,0.32,1.2,0.052,32.0,185.0,0.99266,3.28,0.44,10.1 +6.5,0.33,0.24,14.5,0.048,20.0,96.0,0.99456,3.06,0.3,11.5 +7.4,0.26,0.29,3.7,0.048,14.0,73.0,0.9915,3.06,0.45,11.4 +7.0,0.2,0.4,1.1,0.058,30.0,93.0,0.99322,3.03,0.38,9.2 +6.5,0.21,0.42,1.1,0.059,33.0,101.0,0.9927,3.12,0.38,9.7 +7.3,0.25,0.27,3.8,0.047,16.0,79.0,0.99173,3.07,0.46,11.3 +6.8,0.27,0.24,4.6,0.098,36.0,127.0,0.99412,3.15,0.49,9.6 +6.7,0.24,0.3,10.2,0.07,44.0,179.0,0.99666,2.86,0.46,8.9 +6.4,0.14,0.28,7.9,0.057,21.0,82.0,0.99425,3.26,0.36,10.0 +6.4,0.5,0.2,2.4,0.059,19.0,112.0,0.99314,3.18,0.4,9.2 +6.6,0.41,0.27,10.7,0.11,20.0,103.0,0.99672,3.08,0.41,9.0 
+6.4,0.25,0.28,4.9,0.03,29.0,98.0,0.99024,3.09,0.58,12.8 +6.6,0.41,0.27,10.7,0.11,20.0,103.0,0.99672,3.08,0.41,9.0 +8.0,0.25,0.35,1.1,0.054,13.0,136.0,0.99366,3.08,0.55,9.5 +6.4,0.14,0.28,7.9,0.057,21.0,82.0,0.99425,3.26,0.36,10.0 +6.6,0.21,0.34,5.6,0.046,30.0,140.0,0.99299,3.22,0.38,11.0 +6.4,0.5,0.2,2.4,0.059,19.0,112.0,0.99314,3.18,0.4,9.2 +6.3,0.29,0.23,14.2,0.037,24.0,99.0,0.99528,3.08,0.38,10.6 +6.9,0.37,0.23,9.5,0.057,54.0,166.0,0.99568,3.23,0.42,10.0 +6.9,0.37,0.23,9.5,0.057,54.0,166.0,0.99568,3.23,0.42,10.0 +5.7,0.31,0.28,4.1,0.03,22.0,86.0,0.99062,3.31,0.38,11.7 +6.9,0.45,0.27,4.7,0.035,17.0,80.0,0.99058,3.12,0.36,12.5 +6.9,0.3,0.45,1.4,0.039,36.0,122.0,0.99059,3.07,0.47,11.1 +5.3,0.23,0.56,0.9,0.041,46.0,141.0,0.99119,3.16,0.62,9.7 +6.8,0.3,0.26,20.3,0.037,45.0,150.0,0.99727,3.04,0.38,12.3 +6.7,0.28,0.42,3.5,0.035,43.0,105.0,0.99021,3.18,0.38,12.2 +5.0,0.255,0.22,2.7,0.043,46.0,153.0,0.99238,3.75,0.76,11.3 +7.6,0.4,0.27,1.2,0.053,23.0,193.0,0.99164,3.22,0.38,11.6 +5.5,0.21,0.25,1.2,0.04,18.0,75.0,0.99006,3.31,0.56,11.3 +6.0,0.2,0.25,2.0,0.041,30.0,95.0,0.99078,3.27,0.56,11.1 +6.1,0.17,0.29,1.1,0.041,32.0,92.0,0.99036,3.26,0.57,11.2 +7.5,0.21,0.29,1.5,0.046,35.0,107.0,0.99123,3.15,0.45,11.3 +7.3,0.26,0.32,1.2,0.041,29.0,94.0,0.98978,3.07,0.45,12.0 +6.2,0.35,0.2,18.1,0.069,33.0,158.0,0.99908,3.15,0.5,8.8 +6.2,0.35,0.2,18.1,0.069,33.0,158.0,0.99908,3.15,0.5,8.8 +6.5,0.43,0.31,3.6,0.046,19.0,143.0,0.99022,3.15,0.34,12.0 +6.5,0.4,0.31,3.5,0.046,22.0,147.0,0.99024,3.15,0.31,12.0 +7.4,0.28,0.5,12.1,0.049,48.0,122.0,0.9973,3.01,0.44,9.0 +6.3,0.23,0.22,17.45,0.054,42.0,151.0,0.99853,3.12,0.6,9.3 +6.2,0.34,0.25,12.1,0.059,33.0,171.0,0.99769,3.14,0.56,8.7 +6.6,0.44,0.32,3.0,0.095,13.0,75.0,0.98954,3.1,0.63,12.8 +6.0,0.13,0.36,1.6,0.052,23.0,72.0,0.98974,3.1,0.5,11.5 +6.3,0.17,0.23,5.7,0.048,44.0,147.0,0.99382,3.08,0.54,10.0 +6.3,0.18,0.22,5.6,0.047,45.0,147.0,0.99383,3.09,0.54,10.0 +6.7,0.31,0.34,6.8,0.059,51.0,215.0,0.99538,3.33,0.56,10.3 +6.6,0.33,0.32,15.6,0.054,62.0,227.0,0.99734,3.25,0.56,10.4 +6.3,0.34,0.31,6.0,0.02,18.0,68.0,0.98981,3.22,0.29,13.4 +6.8,0.29,0.32,1.8,0.032,18.0,130.0,0.99095,3.05,0.62,11.2 +7.4,0.31,0.26,8.6,0.048,47.0,206.0,0.9964,3.26,0.36,9.1 +7.4,0.31,0.26,8.6,0.048,47.0,206.0,0.9964,3.26,0.36,9.1 +5.7,0.25,0.27,11.5,0.04,24.0,120.0,0.99411,3.33,0.31,10.8 +6.8,0.27,0.28,7.8,0.038,26.0,89.0,0.9915,3.24,0.34,12.5 +5.9,0.26,0.24,2.4,0.046,27.0,132.0,0.99234,3.63,0.73,11.3 +5.9,0.65,0.23,5.0,0.035,20.0,128.0,0.99016,3.46,0.48,12.8 +7.4,0.31,0.26,8.6,0.048,47.0,206.0,0.9964,3.26,0.36,9.1 +6.6,0.23,0.32,1.5,0.041,8.0,72.0,0.98949,3.22,0.39,12.7 +6.8,0.18,0.35,5.4,0.054,53.0,143.0,0.99287,3.1,0.54,11.0 +6.8,0.28,0.29,11.9,0.052,51.0,149.0,0.99544,3.02,0.58,10.4 +6.8,0.28,0.29,11.9,0.052,51.0,149.0,0.99544,3.02,0.58,10.4 +5.9,0.27,0.27,9.0,0.051,43.0,136.0,0.9941,3.25,0.53,10.7 +6.1,0.25,0.28,10.0,0.055,56.0,131.0,0.994,3.22,0.35,10.9 +6.8,0.28,0.29,11.9,0.052,51.0,149.0,0.99544,3.02,0.58,10.4 +6.8,0.26,0.29,11.9,0.052,54.0,160.0,0.99546,3.03,0.58,10.4 +7.1,0.13,0.29,15.5,0.064,56.0,115.5,0.99737,3.16,0.41,9.7 +6.8,0.18,0.35,5.4,0.054,53.0,143.0,0.99287,3.1,0.54,11.0 +6.2,0.2,0.25,15.0,0.055,8.0,120.0,0.99767,3.19,0.53,9.6 +5.8,0.24,0.28,1.4,0.038,40.0,76.0,0.98711,3.1,0.29,13.9 +7.6,0.48,0.31,9.4,0.046,6.0,194.0,0.99714,3.07,0.61,9.4 +7.4,0.26,0.32,3.7,0.032,29.0,193.0,0.99134,3.1,0.67,12.5 +6.2,0.2,0.25,15.0,0.055,8.0,120.0,0.99767,3.19,0.53,9.6 +6.1,0.3,0.47,1.4,0.049,50.0,187.0,0.9927,3.19,0.45,9.5 +6.2,0.32,0.5,6.5,0.048,61.0,186.0,0.9948,3.19,0.45,9.6 
+6.1,0.3,0.47,1.4,0.049,50.0,187.0,0.9927,3.19,0.45,9.5 +6.3,0.34,0.52,6.3,0.047,63.0,186.0,0.99481,3.18,0.44,9.6 +7.4,0.16,0.3,13.7,0.056,33.0,168.0,0.99825,2.9,0.44,8.7 +7.4,0.16,0.3,13.7,0.056,33.0,168.0,0.99825,2.9,0.44,8.7 +7.4,0.16,0.3,13.7,0.056,33.0,168.0,0.99825,2.9,0.44,8.7 +7.4,0.16,0.3,13.7,0.056,33.0,168.0,0.99825,2.9,0.44,8.7 +7.4,0.16,0.3,13.7,0.056,33.0,168.0,0.99825,2.9,0.44,8.7 +7.4,0.16,0.3,13.7,0.056,33.0,168.0,0.99825,2.9,0.44,8.7 +7.2,0.26,0.38,1.5,0.061,12.0,120.0,0.99192,3.18,0.46,10.4 +7.0,0.31,0.35,1.6,0.063,13.0,119.0,0.99184,3.22,0.5,10.7 +6.6,0.22,0.35,1.4,0.05,23.0,83.0,0.99019,3.17,0.48,12.0 +5.8,0.23,0.31,3.5,0.044,35.0,158.0,0.98998,3.19,0.37,12.1 +6.3,0.17,0.32,1.0,0.04,39.0,118.0,0.98886,3.31,0.4,13.1 +6.0,0.19,0.26,1.4,0.039,30.0,104.0,0.98998,3.32,0.41,12.4 +6.7,0.21,0.34,1.5,0.035,45.0,123.0,0.98949,3.24,0.36,12.6 +7.4,0.16,0.3,13.7,0.056,33.0,168.0,0.99825,2.9,0.44,8.7 +6.6,0.22,0.37,1.6,0.04,31.0,101.0,0.99009,3.15,0.66,12.0 +6.8,0.34,0.27,5.2,0.06,14.0,169.0,0.99252,3.27,0.57,11.6 +7.1,0.34,0.86,1.4,0.174,36.0,99.0,0.99288,2.92,0.5,9.3 +6.3,0.24,0.22,11.9,0.05,65.0,179.0,0.99659,3.06,0.58,9.3 +6.9,0.35,0.39,2.4,0.048,25.0,157.0,0.99133,3.2,0.54,11.1 +6.8,0.24,0.33,3.2,0.049,68.0,161.0,0.99324,3.1,0.69,10.2 +6.4,0.25,0.33,1.7,0.037,35.0,113.0,0.99164,3.23,0.66,10.6 +5.8,0.19,0.33,4.2,0.038,49.0,133.0,0.99107,3.16,0.42,11.3 +6.9,0.24,0.4,15.4,0.052,81.0,198.0,0.9986,3.2,0.69,9.4 +6.5,0.31,0.61,13.0,0.053,31.0,123.0,0.99708,3.09,0.5,9.3 +6.6,0.25,0.32,5.6,0.039,15.0,68.0,0.99163,2.96,0.52,11.1 +7.5,0.38,0.56,9.7,0.055,15.0,170.0,0.99605,3.13,0.65,9.9 +6.2,0.3,0.3,2.5,0.041,29.0,82.0,0.99065,3.31,0.61,11.8 +6.4,0.33,0.28,4.0,0.04,24.0,81.0,0.9903,3.26,0.64,12.6 +6.9,0.24,0.4,15.4,0.052,81.0,198.0,0.9986,3.2,0.69,9.4 +7.6,0.27,0.32,1.2,0.043,23.0,72.0,0.99236,3.06,0.68,10.5 +5.9,0.24,0.34,2.0,0.037,40.0,108.0,0.98948,3.19,0.5,12.3 +5.3,0.33,0.3,1.2,0.048,25.0,119.0,0.99045,3.32,0.62,11.3 +6.4,0.21,0.21,5.1,0.097,21.0,105.0,0.9939,3.07,0.46,9.6 +7.0,0.22,0.3,1.4,0.04,14.0,63.0,0.98985,3.2,0.33,12.0 +7.8,0.27,0.35,1.2,0.05,36.0,140.0,0.99138,3.09,0.45,11.2 +6.7,0.2,0.24,6.5,0.044,28.0,100.0,0.99348,3.12,0.33,10.2 +8.1,0.27,0.33,1.3,0.045,26.0,100.0,0.99066,2.98,0.44,12.4 +6.7,0.2,0.24,6.5,0.044,28.0,100.0,0.99348,3.12,0.33,10.2 +7.1,0.45,0.24,2.7,0.04,24.0,87.0,0.98862,2.94,0.38,13.4 +5.8,0.22,0.29,1.3,0.036,25.0,68.0,0.98865,3.24,0.35,12.6 +6.3,0.3,0.48,7.4,0.053,34.0,149.0,0.99472,3.18,0.53,9.8 +7.9,0.36,0.53,12.9,0.049,63.0,139.0,0.99792,2.94,0.45,9.1 +8.1,0.27,0.33,1.3,0.045,26.0,100.0,0.99066,2.98,0.44,12.4 +8.0,0.24,0.33,1.2,0.044,28.0,101.0,0.99035,3.03,0.43,12.5 +6.7,0.41,0.27,2.6,0.033,25.0,85.0,0.99086,3.05,0.34,11.7 +6.7,0.24,0.31,2.3,0.044,37.0,113.0,0.99013,3.29,0.46,12.9 +6.2,0.3,0.32,1.3,0.054,27.0,183.0,0.99266,3.3,0.43,10.1 +6.9,0.26,0.38,10.5,0.044,33.0,139.0,0.99517,3.06,0.5,10.3 +6.7,0.41,0.27,2.6,0.033,25.0,85.0,0.99086,3.05,0.34,11.7 +5.9,0.32,0.2,14.4,0.05,29.0,144.0,0.99666,3.24,0.41,10.3 +6.1,0.25,0.3,1.2,0.036,42.0,107.0,0.991,3.34,0.56,10.8 +5.6,0.23,0.29,3.1,0.023,19.0,89.0,0.99068,3.25,0.51,11.2 +6.6,0.23,0.32,1.7,0.024,26.0,102.0,0.99084,3.29,0.6,11.8 +6.0,0.17,0.21,6.0,0.05,26.0,134.0,0.9939,3.08,0.54,9.8 +7.1,0.38,0.42,11.8,0.041,32.0,193.0,0.99624,3.04,0.49,10.0 +6.6,0.31,0.37,6.2,0.052,13.0,164.0,0.99602,3.24,0.39,8.8 +6.5,0.38,0.53,1.4,0.142,5.0,69.0,0.9926,3.14,0.52,10.1 +7.0,0.44,0.24,12.1,0.056,68.0,210.0,0.99718,3.05,0.5,9.5 +7.0,0.44,0.24,12.1,0.056,68.0,210.0,0.99718,3.05,0.5,9.5 
+7.0,0.44,0.24,12.1,0.056,68.0,210.0,0.99718,3.05,0.5,9.5 +6.1,0.38,0.14,3.9,0.06,27.0,113.0,0.99344,3.07,0.34,9.2 +8.0,0.33,0.32,4.6,0.041,31.0,180.0,0.99184,2.92,0.74,12.2 +7.0,0.44,0.24,12.1,0.056,68.0,210.0,0.99718,3.05,0.5,9.5 +6.0,0.19,0.29,1.2,0.046,29.0,92.0,0.99033,3.22,0.53,11.3 +6.3,0.28,0.34,8.1,0.038,44.0,129.0,0.99248,3.26,0.29,12.1 +6.1,0.38,0.14,3.9,0.06,27.0,113.0,0.99344,3.07,0.34,9.2 +5.3,0.43,0.11,1.1,0.029,6.0,51.0,0.99076,3.51,0.48,11.2 +5.4,0.22,0.35,6.5,0.029,26.0,87.0,0.99092,3.29,0.44,12.5 +6.2,0.345,0.27,10.1,0.056,38.0,187.0,0.99486,3.31,0.56,10.6 +5.6,0.255,0.57,10.7,0.056,66.0,171.0,0.99464,3.25,0.61,10.4 +5.2,0.2,0.27,3.2,0.047,16.0,93.0,0.99235,3.44,0.53,10.1 +6.2,0.29,0.23,12.4,0.048,33.0,201.0,0.99612,3.11,0.56,9.9 +6.3,0.26,0.25,5.2,0.046,11.0,133.0,0.99202,2.97,0.68,11.0 +6.0,0.22,0.23,5.0,0.045,10.0,122.0,0.99261,2.94,0.63,10.0 +7.5,0.35,0.37,2.5,0.066,29.0,89.0,0.98964,3.14,0.42,12.7 +6.6,0.39,0.28,9.2,0.036,10.0,92.0,0.99206,3.07,0.35,12.1 +6.3,0.23,0.33,6.9,0.052,23.0,118.0,0.9938,3.23,0.46,10.4 +6.3,0.22,0.3,2.0,0.05,23.0,120.0,0.99204,3.24,0.47,10.4 +6.4,0.29,0.18,15.0,0.04,21.0,116.0,0.99736,3.14,0.5,9.2 +6.4,0.29,0.18,15.0,0.04,21.0,116.0,0.99736,3.14,0.5,9.2 +7.5,0.23,0.3,1.2,0.03,27.0,80.0,0.99192,3.05,0.68,10.5 +6.4,0.29,0.18,15.0,0.04,21.0,116.0,0.99736,3.14,0.5,9.2 +5.7,0.28,0.36,1.8,0.041,38.0,90.0,0.99002,3.27,0.98,11.9 +6.5,0.26,0.24,10.8,0.042,47.0,130.0,0.996,3.08,0.4,10.1 +6.4,0.27,0.29,3.9,0.034,62.0,140.0,0.99237,3.1,0.59,11.1 +5.9,0.22,0.29,4.2,0.037,69.0,144.0,0.99214,3.13,0.74,10.8 +6.8,0.26,0.26,2.0,0.019,23.5,72.0,0.99041,3.16,0.47,11.8 +7.6,0.36,0.48,13.5,0.038,44.0,116.0,0.9982,3.04,0.48,9.2 +7.6,0.35,0.47,13.3,0.037,42.0,116.0,0.99822,3.04,0.5,9.2 +5.7,0.18,0.26,2.2,0.023,21.0,95.0,0.9893,3.07,0.54,12.3 +6.6,0.36,0.47,1.4,0.145,26.0,124.0,0.99274,3.09,0.56,10.1 +5.9,0.14,0.2,1.6,0.04,26.0,114.0,0.99105,3.25,0.45,11.4 +5.5,0.23,0.19,2.2,0.044,39.0,161.0,0.99209,3.19,0.43,10.4 +6.7,0.11,0.26,14.8,0.053,44.0,95.0,0.99676,3.2,0.35,9.8 +7.0,0.24,0.24,1.8,0.047,29.0,91.0,0.99251,3.3,0.43,9.9 +6.7,0.11,0.26,14.8,0.053,44.0,95.0,0.99676,3.2,0.35,9.8 +5.3,0.47,0.1,1.3,0.036,11.0,74.0,0.99082,3.48,0.54,11.2 +7.5,0.29,0.24,9.9,0.058,25.0,115.0,0.99567,3.15,0.46,10.9 +6.0,0.33,0.26,5.1,0.051,16.0,119.0,0.99416,3.15,0.41,9.2 +6.0,0.33,0.26,5.1,0.051,16.0,119.0,0.99416,3.15,0.41,9.2 +5.8,0.32,0.23,1.5,0.033,39.0,121.0,0.9887,2.96,0.35,12.0 +5.8,0.3,0.23,1.5,0.034,37.0,121.0,0.98871,2.96,0.34,12.1 +3.8,0.31,0.02,11.1,0.036,20.0,114.0,0.99248,3.75,0.44,12.4 +6.2,0.36,0.22,5.25,0.038,44.0,145.0,0.99184,3.22,0.4,11.2 +6.0,0.31,0.27,2.3,0.042,19.0,120.0,0.98952,3.32,0.41,12.7 +6.9,0.52,0.54,7.9,0.036,23.0,169.0,0.99267,3.26,0.47,12.2 +7.0,0.55,0.05,8.0,0.036,19.0,164.0,0.99269,3.26,0.46,12.2 +5.8,0.2,0.16,1.4,0.042,44.0,99.0,0.98912,3.23,0.37,12.2 +6.2,0.36,0.22,5.25,0.038,44.0,145.0,0.99184,3.22,0.4,11.2 +6.0,0.31,0.27,2.3,0.042,19.0,120.0,0.98952,3.32,0.41,12.7 +6.0,0.29,0.27,2.3,0.044,20.0,117.0,0.9895,3.31,0.41,12.7 +5.7,0.22,0.29,3.5,0.04,27.0,146.0,0.98999,3.17,0.36,12.1 +7.1,0.46,0.23,13.7,0.045,44.0,192.0,0.9981,3.11,0.53,9.4 +6.6,0.21,0.3,9.9,0.041,64.0,174.0,0.995,3.07,0.5,10.1 +6.9,0.42,0.2,15.4,0.043,57.0,201.0,0.99848,3.08,0.54,9.4 +5.7,0.22,0.2,16.0,0.044,41.0,113.0,0.99862,3.22,0.46,8.9 +5.7,0.22,0.2,16.0,0.044,41.0,113.0,0.99862,3.22,0.46,8.9 +5.7,0.22,0.2,16.0,0.044,41.0,113.0,0.99862,3.22,0.46,8.9 +5.7,0.22,0.2,16.0,0.044,41.0,113.0,0.99862,3.22,0.46,8.9 +5.2,0.31,0.2,2.4,0.027,27.0,117.0,0.98886,3.56,0.45,13.0 
+7.2,0.22,0.35,5.5,0.054,37.0,183.0,0.99474,3.08,0.5,10.3 +5.6,0.18,0.29,2.3,0.04,5.0,47.0,0.99126,3.07,0.45,10.1 +6.2,0.24,0.27,16.8,0.04,48.0,129.0,0.99691,3.23,0.38,10.5 +5.7,0.22,0.2,16.0,0.044,41.0,113.0,0.99862,3.22,0.46,8.9 +5.7,0.26,0.24,17.8,0.059,23.0,124.0,0.99773,3.3,0.5,10.1 +5.7,0.26,0.24,17.8,0.059,23.0,124.0,0.99773,3.3,0.5,10.1 +6.0,0.2,0.26,6.8,0.049,22.0,93.0,0.9928,3.15,0.42,11.0 +6.0,0.2,0.26,6.8,0.049,22.0,93.0,0.9928,3.15,0.42,11.0 +6.0,0.2,0.26,6.8,0.049,22.0,93.0,0.9928,3.15,0.42,11.0 +6.0,0.2,0.26,6.8,0.049,22.0,93.0,0.9928,3.15,0.42,11.0 +7.6,0.28,0.17,1.6,0.046,28.0,117.0,0.99288,3.08,0.43,10.0 +7.0,0.2,0.33,4.7,0.03,25.0,76.0,0.99202,2.88,0.54,10.5 +6.6,0.26,0.27,11.8,0.048,28.0,112.0,0.99606,2.87,0.49,9.7 +5.7,0.26,0.24,17.8,0.059,23.0,124.0,0.99773,3.3,0.5,10.1 +7.2,0.21,0.36,15.7,0.045,68.0,183.0,0.99922,3.25,0.76,9.4 +6.9,0.22,0.32,5.8,0.041,20.0,119.0,0.99296,3.17,0.55,11.2 +7.2,0.21,0.36,15.7,0.045,68.0,183.0,0.99922,3.25,0.76,9.4 +7.4,0.22,0.28,9.0,0.046,22.0,121.0,0.99468,3.1,0.55,10.8 +7.2,0.21,0.36,15.7,0.045,68.0,183.0,0.99922,3.25,0.76,9.4 +6.9,0.22,0.32,5.8,0.041,20.0,119.0,0.99296,3.17,0.55,11.2 +7.0,0.2,0.35,8.8,0.037,31.0,103.0,0.99388,3.13,0.49,11.0 +5.6,0.26,0.0,10.2,0.038,13.0,111.0,0.99315,3.44,0.46,12.4 +6.3,0.28,0.3,6.6,0.208,60.0,154.0,0.99478,3.1,0.4,9.4 +6.4,0.29,0.3,6.5,0.209,62.0,156.0,0.99478,3.1,0.4,9.4 +7.2,0.34,0.23,8.9,0.105,22.0,155.0,0.99692,3.01,0.58,9.5 +7.1,0.39,0.39,11.1,0.034,25.0,204.0,0.99616,3.05,0.52,10.0 +6.9,0.26,0.29,4.2,0.043,33.0,114.0,0.9902,3.16,0.31,12.5 +6.1,0.24,0.25,1.6,0.044,24.0,115.0,0.9921,3.39,0.59,10.9 +5.9,0.25,0.24,7.4,0.044,21.0,113.0,0.99462,3.38,0.58,10.5 +6.1,0.24,0.27,11.5,0.05,51.0,133.0,0.99476,3.22,0.37,10.8 +6.5,0.22,0.27,1.6,0.039,36.0,116.0,0.99178,3.38,0.57,11.0 +6.2,0.26,0.29,2.0,0.036,16.0,87.0,0.99081,3.33,0.61,11.8 +6.6,0.34,0.25,4.8,0.038,16.0,121.0,0.99198,3.36,0.71,12.6 +5.6,0.225,0.24,9.8,0.054,59.0,140.0,0.99545,3.17,0.39,10.2 +7.1,0.23,0.28,1.9,0.046,33.0,103.0,0.98997,3.12,0.31,12.0 +6.9,0.26,0.29,4.2,0.043,33.0,114.0,0.9902,3.16,0.31,12.5 +6.4,0.27,0.3,1.6,0.04,19.0,86.0,0.99089,3.32,0.65,11.5 +6.3,0.41,0.22,7.3,0.035,23.0,117.0,0.99172,3.2,0.39,11.94 +6.7,0.41,0.24,5.4,0.035,33.0,115.0,0.9901,3.12,0.44,12.89333333 +7.2,0.585,0.2,10.4,0.086,17.0,94.0,0.99681,3.13,0.4,9.4 +6.7,0.34,0.26,1.9,0.038,58.0,138.0,0.9893,3.0,0.47,12.2 +6.3,0.41,0.22,7.3,0.035,23.0,117.0,0.99172,3.2,0.39,11.94 +6.7,0.41,0.24,5.4,0.035,33.0,115.0,0.9901,3.12,0.44,12.89333333 +6.4,0.26,0.35,7.7,0.056,45.0,191.0,0.99527,3.16,0.5,9.5 +6.3,0.28,0.22,11.5,0.036,27.0,150.0,0.99445,3.0,0.33,10.6 +7.4,0.16,0.33,1.2,0.042,47.0,121.0,0.99198,3.04,0.68,10.5 +8.4,0.27,0.3,2.2,0.037,36.0,129.0,0.99085,2.89,0.3,11.46666667 +5.9,0.2,0.28,1.0,0.043,45.0,100.0,0.99033,3.4,0.41,11.4 +6.4,0.24,0.26,8.2,0.054,47.0,182.0,0.99538,3.12,0.5,9.5 +7.4,0.38,0.34,8.3,0.052,44.0,168.0,0.99627,3.11,0.52,9.2 +6.4,0.24,0.26,8.2,0.054,47.0,182.0,0.99538,3.12,0.5,9.5 +6.4,0.42,0.19,9.3,0.043,28.0,145.0,0.99433,3.23,0.53,10.98 +6.4,0.23,0.26,8.1,0.054,47.0,181.0,0.9954,3.12,0.49,9.4 +6.4,0.24,0.26,8.2,0.054,47.0,182.0,0.99538,3.12,0.5,9.5 +7.4,0.38,0.34,8.3,0.052,44.0,168.0,0.99627,3.11,0.52,9.2 +7.3,0.19,0.27,13.9,0.057,45.0,155.0,0.99807,2.94,0.41,8.8 +7.3,0.19,0.27,13.9,0.057,45.0,155.0,0.99807,2.94,0.41,8.8 +7.3,0.19,0.27,13.9,0.057,45.0,155.0,0.99807,2.94,0.41,8.8 +7.3,0.19,0.27,13.9,0.057,45.0,155.0,0.99807,2.94,0.41,8.8 +7.3,0.19,0.27,13.9,0.057,45.0,155.0,0.99807,2.94,0.41,8.8 
+7.3,0.19,0.27,13.9,0.057,45.0,155.0,0.99807,2.94,0.41,8.8 +7.3,0.19,0.27,13.9,0.057,45.0,155.0,0.99807,2.94,0.41,8.8 +6.8,0.24,0.29,2.0,0.044,15.0,96.0,0.99232,3.23,0.64,10.4 +7.3,0.19,0.27,13.9,0.057,45.0,155.0,0.99807,2.94,0.41,8.8 +7.4,0.27,0.52,15.7,0.054,36.0,139.0,0.99788,3.04,0.62,10.03333333 +5.7,0.28,0.35,1.2,0.052,39.0,141.0,0.99108,3.44,0.69,11.3 +5.8,0.22,0.25,1.5,0.024,21.0,109.0,0.99234,3.37,0.58,10.4 +6.7,0.27,0.69,1.2,0.176,36.0,106.0,0.99288,2.96,0.43,9.2 +7.1,0.2,0.35,3.2,0.034,21.0,107.0,0.99195,3.11,0.54,11.1 +6.7,0.27,0.69,1.2,0.176,36.0,106.0,0.99288,2.96,0.43,9.2 +7.1,0.23,0.3,2.6,0.034,62.0,148.0,0.99121,3.03,0.56,11.3 +7.6,0.31,0.52,13.2,0.042,61.0,148.0,0.99839,2.98,0.47,9.1 +7.2,0.34,0.28,10.4,0.108,43.0,187.0,0.99738,2.96,0.57,9.4 +7.0,0.36,0.25,5.7,0.015,14.0,73.0,0.98963,2.82,0.59,13.2 +6.4,0.31,0.28,2.5,0.039,34.0,137.0,0.98946,3.22,0.38,12.7 +7.3,0.28,0.35,1.6,0.054,31.0,148.0,0.99178,3.18,0.47,10.7 +7.4,0.16,0.3,1.4,0.064,34.0,166.0,0.99136,3.11,0.42,11.43333333 +6.4,0.31,0.27,7.4,0.049,48.0,169.0,0.99323,3.27,0.45,11.1 +6.4,0.31,0.28,2.5,0.039,34.0,137.0,0.98946,3.22,0.38,12.7 +6.2,0.29,0.29,5.6,0.046,35.0,178.0,0.99313,3.25,0.51,10.53333333 +5.9,0.28,0.34,3.6,0.04,50.0,194.0,0.9912,3.31,0.52,11.6 +6.5,0.23,0.2,7.5,0.05,44.0,179.0,0.99504,3.18,0.48,9.533333333 +7.2,0.34,0.2,5.8,0.062,52.0,203.0,0.99461,3.17,0.44,9.8 +7.3,0.28,0.35,1.6,0.054,31.0,148.0,0.99178,3.18,0.47,10.7 +6.5,0.2,0.33,1.5,0.039,36.0,110.0,0.99008,3.22,0.65,12.0 +6.2,0.24,0.27,2.9,0.039,30.0,123.0,0.98959,3.12,0.37,12.8 +7.1,0.31,0.25,11.2,0.048,32.0,136.0,0.99663,3.14,0.4,9.5 +6.4,0.29,0.21,9.65,0.041,36.0,119.0,0.99334,2.99,0.34,10.93333333 +6.3,0.19,0.33,10.1,0.063,63.0,133.0,0.99561,2.86,0.41,9.1 +5.9,0.29,0.28,3.2,0.035,16.0,117.0,0.98959,3.26,0.42,12.6 +7.1,0.31,0.25,11.2,0.048,32.0,136.0,0.99663,3.14,0.4,9.5 +6.5,0.3,0.28,11.45,0.041,29.0,109.0,0.99418,2.98,0.3,10.9 +6.4,0.29,0.21,9.65,0.041,36.0,119.0,0.99334,2.99,0.34,10.93333333 +6.5,0.22,0.19,4.5,0.096,16.0,115.0,0.9937,3.02,0.44,9.6 +7.0,0.23,0.28,2.7,0.053,16.0,92.0,0.99372,3.18,0.56,9.3 +7.1,0.23,0.23,3.5,0.038,23.0,112.0,0.99157,3.05,0.37,11.36666667 +6.1,0.26,0.28,1.7,0.043,24.0,98.0,0.98918,3.14,0.44,12.5 +6.4,0.35,0.21,2.1,0.051,46.0,171.0,0.9932,3.16,0.5,9.5 +6.0,0.32,0.32,4.8,0.041,40.0,186.0,0.99235,3.22,0.54,11.0 +6.1,0.34,0.21,5.0,0.042,17.0,133.0,0.99373,3.02,0.53,9.4 +6.5,0.13,0.27,2.6,0.035,32.0,76.0,0.9914,3.21,0.76,11.33333333 +6.5,0.315,0.2,6.6,0.041,9.0,126.0,0.99494,2.94,0.51,8.8 +6.1,0.34,0.21,5.0,0.042,17.0,133.0,0.99373,3.02,0.53,9.4 +5.7,0.31,0.29,7.3,0.05,33.0,143.0,0.99332,3.31,0.5,11.06666667 +6.4,0.3,0.27,5.0,0.058,27.0,151.0,0.99198,3.22,0.49,12.2 +7.0,0.24,0.26,1.7,0.041,31.0,110.0,0.99142,3.2,0.53,11.0 +6.5,0.13,0.27,2.6,0.035,32.0,76.0,0.9914,3.21,0.76,11.33333333 +6.4,0.26,0.21,8.2,0.05,51.0,182.0,0.99542,3.23,0.48,9.5 +6.4,0.26,0.21,8.2,0.05,51.0,182.0,0.99542,3.23,0.48,9.5 +6.0,0.27,0.31,5.0,0.043,54.0,170.0,0.9924,3.28,0.52,11.0 +7.1,0.21,0.33,1.2,0.039,34.0,97.0,0.99112,3.11,0.75,11.2 +6.7,0.26,0.29,7.1,0.036,28.0,100.0,0.99534,3.08,0.36,9.3 +6.3,0.28,0.22,9.5,0.04,30.0,111.0,0.99338,3.05,0.31,10.8 +6.2,0.25,0.44,15.8,0.057,39.0,167.0,0.99804,3.14,0.51,9.2 +7.3,0.22,0.37,15.5,0.048,70.0,203.0,0.99899,3.25,0.77,9.4 +6.2,0.25,0.44,15.8,0.057,39.0,167.0,0.99804,3.14,0.51,9.2 +6.4,0.18,0.28,17.05,0.047,53.0,139.0,0.99724,3.25,0.35,10.5 +6.3,0.2,0.26,12.7,0.046,60.0,143.0,0.99526,3.26,0.35,10.8 +6.6,0.24,0.22,12.3,0.051,35.0,146.0,0.99676,3.1,0.67,9.4 
+7.4,0.27,0.26,11.8,0.053,55.0,173.0,0.99699,3.11,0.6,9.8 +7.4,0.27,0.26,11.8,0.053,55.0,173.0,0.99699,3.11,0.6,9.8 +7.4,0.27,0.26,11.8,0.053,55.0,173.0,0.99699,3.11,0.6,9.8 +6.6,0.24,0.22,12.3,0.051,35.0,146.0,0.99676,3.1,0.67,9.4 +7.4,0.27,0.26,11.8,0.053,55.0,173.0,0.99699,3.11,0.6,9.8 +7.1,0.38,0.29,13.6,0.041,30.0,137.0,0.99461,3.02,0.96,12.1 +6.8,0.43,0.26,5.2,0.043,40.0,176.0,0.99116,3.17,0.41,12.3 +5.2,0.22,0.46,6.2,0.066,41.0,187.0,0.99362,3.19,0.42,9.733333333 +5.9,0.29,0.16,7.9,0.044,48.0,197.0,0.99512,3.21,0.36,9.4 +5.9,0.29,0.16,7.9,0.044,48.0,197.0,0.99512,3.21,0.36,9.4 +6.3,0.29,0.29,3.3,0.037,32.0,140.0,0.9895,3.17,0.36,12.8 +6.3,0.19,0.32,2.8,0.046,18.0,80.0,0.99043,2.92,0.47,11.05 +5.7,0.29,0.16,7.9,0.044,48.0,197.0,0.99512,3.21,0.36,9.4 +6.3,0.29,0.29,3.3,0.037,32.0,140.0,0.9895,3.17,0.36,12.8 +5.7,0.24,0.47,6.3,0.069,35.0,182.0,0.99391,3.11,0.46,9.733333333 +5.8,0.3,0.38,4.9,0.039,22.0,86.0,0.98963,3.23,0.58,13.1 +7.1,0.27,0.27,10.4,0.041,26.0,114.0,0.99335,3.04,0.52,11.5 +5.8,0.3,0.38,4.9,0.039,22.0,86.0,0.98963,3.23,0.58,13.1 +7.1,0.27,0.27,10.4,0.041,26.0,114.0,0.99335,3.04,0.52,11.5 +6.3,0.305,0.22,16.0,0.061,26.0,141.0,0.99824,3.08,0.5,9.1 +5.7,0.24,0.47,6.3,0.069,35.0,182.0,0.99391,3.11,0.46,9.75 +6.2,0.22,0.28,2.2,0.04,24.0,125.0,0.9917,3.19,0.48,10.5 +6.6,0.22,0.23,17.3,0.047,37.0,118.0,0.99906,3.08,0.46,8.8 +6.6,0.22,0.23,17.3,0.047,37.0,118.0,0.99906,3.08,0.46,8.8 +6.6,0.22,0.23,17.3,0.047,37.0,118.0,0.99906,3.08,0.46,8.8 +6.6,0.22,0.23,17.3,0.047,37.0,118.0,0.99906,3.08,0.46,8.8 +6.2,0.22,0.28,2.2,0.04,24.0,125.0,0.9917,3.19,0.48,10.5 +6.2,0.22,0.28,2.2,0.04,24.0,125.0,0.9917,3.19,0.48,10.5 +6.6,0.22,0.23,17.3,0.047,37.0,118.0,0.99906,3.08,0.46,8.8 +6.1,0.22,0.5,6.6,0.045,30.0,122.0,0.99415,3.22,0.49,9.9 +6.2,0.21,0.52,6.5,0.047,28.0,123.0,0.99418,3.22,0.49,9.9 +6.3,0.32,0.26,12.3,0.044,24.0,205.0,0.99611,3.11,0.58,9.9 +6.9,0.44,0.27,5.0,0.038,33.0,166.0,0.99124,3.2,0.42,12.2 +6.1,0.31,0.34,2.8,0.042,59.5,162.0,0.99179,3.27,0.47,10.8 +8.1,0.36,0.59,13.6,0.051,60.0,134.0,0.99886,2.96,0.39,8.7 +6.6,0.38,0.28,2.8,0.043,17.0,67.0,0.98924,3.21,0.47,13.2 +6.7,0.24,0.26,5.4,0.03,15.0,94.0,0.99045,3.15,0.38,12.7 +6.9,0.56,0.26,10.9,0.06,55.0,193.0,0.9969,3.21,0.44,9.4 +7.2,0.24,0.24,1.7,0.045,18.0,161.0,0.99196,3.25,0.53,11.2 +6.5,0.29,0.3,9.15,0.051,25.0,166.0,0.99339,3.24,0.56,11.33333333 +6.7,0.28,0.28,4.5,0.051,14.0,92.0,0.99224,3.36,0.58,11.9 +6.5,0.29,0.3,9.15,0.051,25.0,166.0,0.99339,3.24,0.56,11.35 +6.1,0.21,0.19,1.4,0.046,51.0,131.0,0.99184,3.22,0.39,10.5 +7.2,0.47,0.16,5.9,0.048,14.0,125.0,0.99428,3.09,0.49,9.8 +6.7,0.34,0.31,16.4,0.051,20.0,146.0,0.99834,3.06,0.54,9.1 +6.6,0.27,0.25,1.2,0.033,36.0,111.0,0.98918,3.16,0.37,12.4 +6.7,0.34,0.31,16.4,0.051,20.0,146.0,0.99834,3.06,0.54,9.1 +7.2,0.47,0.16,5.9,0.048,14.0,125.0,0.99428,3.09,0.49,9.8 +5.0,0.35,0.25,7.8,0.031,24.0,116.0,0.99241,3.39,0.4,11.3 +5.0,0.35,0.25,7.8,0.031,24.0,116.0,0.99241,3.39,0.4,11.3 +4.4,0.46,0.1,2.8,0.024,31.0,111.0,0.98816,3.48,0.34,13.1 +6.6,0.38,0.29,2.9,0.035,15.0,101.0,0.98916,3.04,0.37,12.5 +7.3,0.3,0.25,2.5,0.045,32.0,122.0,0.99329,3.18,0.54,10.3 +6.4,0.28,0.22,12.8,0.039,51.0,150.0,0.99535,3.23,0.43,10.7 +6.9,0.29,0.25,12.2,0.04,29.0,136.0,0.99552,3.05,0.65,10.4 +6.3,0.3,0.19,7.7,0.049,47.0,184.0,0.99514,3.22,0.48,9.5 +6.3,0.39,0.22,2.8,0.048,53.0,173.0,0.99304,3.24,0.45,9.8 +6.6,0.38,0.29,2.9,0.035,15.0,101.0,0.98916,3.04,0.37,12.5 +6.6,0.18,0.26,17.3,0.051,17.0,149.0,0.9984,3.0,0.43,9.4 +6.0,0.28,0.29,19.3,0.051,36.0,174.0,0.99911,3.14,0.5,9.0 
+6.0,0.28,0.29,19.3,0.051,36.0,174.0,0.99911,3.14,0.5,9.0 +6.0,0.28,0.29,19.3,0.051,36.0,174.0,0.99911,3.14,0.5,9.0 +6.6,0.35,0.26,2.7,0.045,19.0,129.0,0.98952,3.24,0.48,13.0 +5.9,0.22,0.18,6.4,0.041,28.0,120.0,0.99403,3.27,0.5,9.9 +6.6,0.18,0.26,17.3,0.051,17.0,149.0,0.9984,3.0,0.43,9.4 +7.7,0.28,0.24,2.4,0.044,29.0,157.0,0.99312,3.27,0.56,10.6 +7.1,0.42,0.2,2.8,0.038,28.0,109.0,0.98968,3.23,0.47,13.4 +6.7,0.32,0.32,1.7,0.031,31.0,114.0,0.98946,3.12,0.35,12.5 +6.6,0.26,0.56,15.4,0.053,32.0,141.0,0.9981,3.11,0.49,9.3 +6.6,0.26,0.56,15.4,0.053,32.0,141.0,0.9981,3.11,0.49,9.3 +6.2,0.32,0.24,4.1,0.051,34.0,149.0,0.99306,3.36,0.52,11.0 +6.3,0.25,0.27,6.6,0.054,40.0,158.0,0.99378,3.2,0.48,10.3 +6.2,0.21,0.24,1.2,0.051,31.0,95.0,0.99036,3.24,0.57,11.3 +6.4,0.23,0.27,2.1,0.042,35.0,100.0,0.99094,3.03,0.63,10.9 +4.7,0.145,0.29,1.0,0.042,35.0,90.0,0.9908,3.76,0.49,11.3 +6.2,0.2,0.28,1.1,0.039,24.0,78.0,0.9899,3.36,0.47,12.1 +7.0,0.28,0.28,1.4,0.039,12.0,83.0,0.99173,3.18,0.65,11.1 +7.1,0.36,0.2,1.6,0.271,24.0,140.0,0.99356,3.11,0.63,9.8 +5.7,0.25,0.22,9.8,0.049,50.0,125.0,0.99571,3.2,0.45,10.1 +5.7,0.22,0.33,1.9,0.036,37.0,110.0,0.98945,3.26,0.58,12.4 +6.0,0.2,0.38,1.3,0.034,37.0,104.0,0.98865,3.11,0.52,12.7 +6.4,0.32,0.26,7.9,0.05,53.0,180.0,0.99514,3.14,0.5,9.6 +6.4,0.32,0.26,7.9,0.05,53.0,180.0,0.99514,3.14,0.5,9.6 +6.0,0.555,0.26,4.5,0.053,17.0,126.0,0.9943,3.24,0.46,9.1 +5.9,0.22,0.45,22.6,0.12,55.0,122.0,0.99636,3.1,0.35,12.8 +6.4,0.32,0.26,7.9,0.05,53.0,180.0,0.99514,3.14,0.5,9.6 +6.2,0.3,0.33,3.5,0.037,37.0,155.0,0.98987,3.18,0.37,12.4 +5.8,0.28,0.18,1.2,0.058,7.0,108.0,0.99288,3.23,0.58,9.55 +5.8,0.555,0.26,4.5,0.053,17.0,126.0,0.9943,3.24,0.46,9.1 +6.7,0.31,0.33,2.0,0.033,12.0,74.0,0.99064,3.29,0.65,12.5 +6.4,0.15,0.25,7.8,0.05,13.0,68.0,0.99394,3.16,0.4,9.9 +6.4,0.13,0.28,0.9,0.045,32.0,87.0,0.99175,3.47,0.52,11.2 +6.7,0.48,0.49,2.9,0.03,28.0,122.0,0.98926,3.13,0.4,13.0 +6.7,0.48,0.49,2.9,0.03,28.0,122.0,0.98926,3.13,0.4,13.0 +5.8,0.3,0.33,3.5,0.033,25.0,116.0,0.99057,3.2,0.44,11.7 +6.1,0.28,0.23,4.2,0.038,13.0,95.0,0.98898,2.97,0.7,13.1 +6.0,0.19,0.37,9.7,0.032,17.0,50.0,0.9932,3.08,0.66,12.0 +6.8,0.31,0.25,10.5,0.043,30.0,165.0,0.9972,3.36,0.55,10.55 +7.5,0.24,0.29,1.1,0.046,34.0,84.0,0.9902,3.04,0.39,11.45 +6.8,0.23,0.39,16.1,0.053,71.0,194.0,0.9988,3.18,0.64,10.2 +7.5,0.24,0.29,1.1,0.046,34.0,84.0,0.9902,3.04,0.39,11.45 +6.3,0.29,0.3,8.1,0.212,60.0,150.0,0.9958,3.1,0.4,9.3 +6.8,0.2,0.25,6.2,0.052,22.0,106.0,0.9935,3.09,0.54,10.8 +5.2,0.38,0.26,7.7,0.053,20.0,103.0,0.9925,3.27,0.45,12.2 +7.8,0.27,0.33,2.4,0.053,36.0,175.0,0.992,3.2,0.55,11.0 +6.6,0.54,0.21,16.3,0.055,41.0,182.0,0.9986,3.35,0.54,10.4 +7.1,0.25,0.31,2.3,0.05,32.0,156.0,0.9914,3.14,0.51,11.4 +5.8,0.61,0.01,8.4,0.041,31.0,104.0,0.9909,3.26,0.72,14.05 +6.5,0.32,0.23,8.5,0.051,20.0,138.0,0.9943,3.03,0.42,10.7 +6.4,0.28,0.23,6.0,0.051,50.0,162.0,0.994,3.15,0.52,10.2 +6.6,0.19,0.28,1.1,0.044,38.0,100.0,0.9904,3.22,0.69,11.2 +5.1,0.305,0.13,1.75,0.036,17.0,73.0,0.99,3.4,0.51,12.33333333 +5.8,0.26,0.3,2.6,0.034,75.0,129.0,0.9902,3.2,0.38,11.5 +6.7,0.23,0.17,1.3,0.061,14.0,100.0,0.9925,3.07,0.55,9.5 +6.8,0.33,0.3,2.1,0.047,35.0,147.0,0.9886,3.24,0.56,13.4 +6.1,0.27,0.32,1.1,0.034,24.0,110.0,0.9898,3.36,0.4,12.5 +6.1,0.27,0.32,1.1,0.034,24.0,110.0,0.9898,3.36,0.4,12.5 +6.8,0.4,0.29,2.8,0.044,27.0,97.0,0.9904,3.12,0.42,11.2 +6.1,0.4,0.18,9.0,0.051,28.5,259.0,0.9964,3.19,0.5,8.8 +7.1,0.28,0.26,2.8,0.039,50.0,118.0,0.9908,3.06,0.59,11.2 +6.2,0.32,0.32,2.2,0.036,15.0,70.0,0.9899,3.16,0.48,12.7 
+6.8,0.17,0.17,5.1,0.049,26.0,82.0,0.993,3.0,0.38,9.8 +9.0,0.2,0.33,3.5,0.049,10.0,40.0,0.9944,3.14,0.36,9.8 +5.8,0.13,0.22,12.7,0.058,24.0,183.0,0.9956,3.32,0.42,11.7 +5.8,0.31,0.31,7.5,0.052,55.0,230.0,0.9949,3.19,0.46,9.8 +6.3,0.36,0.2,2.0,0.048,48.0,191.0,0.9929,3.17,0.51,9.6 +9.0,0.2,0.33,3.5,0.049,10.0,40.0,0.9944,3.14,0.36,9.8 +6.7,0.18,0.25,14.3,0.048,79.0,149.0,0.9975,3.12,0.37,9.7 +6.6,0.16,0.25,9.8,0.049,59.5,137.0,0.995,3.16,0.38,10.0 +5.8,0.13,0.22,12.7,0.058,24.0,183.0,0.9956,3.32,0.42,11.7 +5.8,0.27,0.22,12.7,0.058,42.0,206.0,0.9946,3.32,0.38,12.3 +6.8,0.17,0.17,5.1,0.049,26.0,82.0,0.993,3.0,0.38,9.8 +6.4,0.37,0.19,3.5,0.068,18.0,101.0,0.9934,3.03,0.38,9.0 +7.3,0.26,0.53,12.7,0.047,60.5,164.5,0.9984,3.06,0.45,9.1 +7.3,0.28,0.54,12.9,0.049,62.0,162.5,0.9984,3.06,0.45,9.1 +7.3,0.28,0.54,12.9,0.049,62.0,162.5,0.9984,3.06,0.45,9.1 +5.8,0.12,0.21,1.3,0.056,35.0,121.0,0.9908,3.32,0.33,11.4 +6.1,0.25,0.18,10.5,0.049,41.0,124.0,0.9963,3.14,0.35,10.5 +6.4,0.24,0.27,1.5,0.04,35.0,105.0,0.98914,3.13,0.3,12.4 +7.3,0.26,0.53,12.7,0.047,60.5,156.0,0.9984,3.06,0.45,9.1 +7.3,0.28,0.54,12.9,0.049,62.0,152.0,0.9984,3.06,0.45,9.1 +8.3,0.18,0.37,1.2,0.049,6.0,94.0,0.9937,3.18,0.52,10.1 +7.1,0.09,0.3,6.2,0.032,24.0,134.0,0.993,2.99,0.39,10.9 +8.3,0.14,0.36,8.8,0.026,13.0,60.0,0.9956,3.13,0.35,11.05 +5.8,0.28,0.3,3.9,0.026,36.0,105.0,0.98963,3.26,0.58,12.75 +6.0,0.23,0.34,1.3,0.025,23.0,111.0,0.98961,3.36,0.37,12.7 +6.9,0.28,0.37,9.1,0.037,16.0,76.0,0.9948,3.05,0.54,11.1 +6.9,0.28,0.37,9.1,0.037,16.0,76.0,0.9948,3.05,0.54,11.1 +5.8,0.28,0.3,3.9,0.026,36.0,105.0,0.98963,3.26,0.58,12.75 +6.3,0.25,0.53,1.8,0.021,41.0,101.0,0.989315,3.19,0.31,13.0 +6.5,0.2,0.31,2.1,0.033,32.0,95.0,0.989435,2.96,0.61,12.0 +5.9,0.29,0.32,1.4,0.022,17.0,47.0,0.9899,3.35,0.35,11.5 +6.4,0.46,0.22,14.7,0.047,51.0,183.0,0.998275,3.39,0.6,10.5 +6.9,0.28,0.37,9.1,0.037,16.0,76.0,0.9948,3.05,0.54,11.1 +6.8,0.23,0.33,1.9,0.047,20.0,101.0,0.9914,3.1,0.46,11.1 +7.0,0.23,0.32,1.8,0.048,25.0,113.0,0.9915,3.11,0.47,11.1 +6.4,0.55,0.26,9.6,0.027,20.0,104.0,0.9924,3.22,0.73,13.1 +5.7,0.28,0.3,3.9,0.026,36.0,105.0,0.98963,3.26,0.58,12.75 +6.0,0.23,0.34,1.3,0.025,23.0,111.0,0.98961,3.36,0.37,12.7 +6.8,0.45,0.3,11.8,0.094,23.0,97.0,0.997,3.09,0.44,9.6 +6.1,0.2,0.4,1.9,0.028,32.0,138.0,0.9914,3.26,0.72,11.7 +6.1,0.37,0.46,12.0,0.042,61.0,210.0,0.997,3.17,0.59,9.7 +5.9,0.21,0.23,7.9,0.033,22.0,130.0,0.9944,3.38,0.59,10.9 +6.9,0.22,0.32,9.3,0.04,22.0,110.0,0.9958,3.34,0.54,10.7 +5.4,0.27,0.22,4.6,0.022,29.0,107.0,0.98889,3.33,0.54,13.8 +6.0,0.26,0.26,2.2,0.035,10.0,72.0,0.989465,3.11,0.48,12.15 +5.6,0.18,0.3,10.2,0.028,28.0,131.0,0.9954,3.49,0.42,10.8 +5.6,0.26,0.27,10.6,0.03,27.0,119.0,0.9947,3.4,0.34,10.7 +7.0,0.23,0.35,1.4,0.036,31.0,113.0,0.9912,3.16,0.48,10.8 +5.8,0.28,0.66,9.1,0.039,26.0,159.0,0.9965,3.66,0.55,10.8 +8.6,0.36,0.26,11.1,0.03,43.5,171.0,0.9948,3.03,0.49,12.0 +5.8,0.28,0.66,9.1,0.039,26.0,159.0,0.9965,3.66,0.55,10.8 +6.4,0.3,0.27,4.4,0.055,17.0,135.0,0.9925,3.23,0.44,12.2 +6.2,0.2,0.32,2.8,0.05,17.0,126.0,0.9936,3.18,0.55,9.4 +5.8,0.29,0.15,1.1,0.029,12.0,83.0,0.9898,3.3,0.4,11.4 +5.7,0.22,0.28,1.3,0.027,26.0,101.0,0.98948,3.35,0.38,12.5 +5.6,0.22,0.32,1.2,0.024,29.0,97.0,0.98823,3.2,0.46,13.05 +6.8,0.32,0.23,3.3,0.026,31.0,99.0,0.9896,3.1,0.4,12.4 +6.2,0.2,0.26,9.7,0.03,39.0,102.0,0.9908,3.08,0.56,12.9 +6.1,0.35,0.24,2.3,0.034,25.0,133.0,0.9906,3.34,0.59,12.0 +5.9,0.3,0.29,1.1,0.036,23.0,56.0,0.9904,3.19,0.38,11.3 +6.3,0.15,0.34,11.4,0.05,25.0,96.0,0.99754,3.21,0.49,10.0 
+4.8,0.13,0.32,1.2,0.042,40.0,98.0,0.9898,3.42,0.64,11.8 +6.0,0.2,0.26,14.7,0.045,53.0,125.0,0.998365,2.99,0.69,9.4 +5.7,0.2,0.24,13.8,0.047,44.0,112.0,0.99837,2.97,0.66,8.8 +6.0,0.27,0.26,1.3,0.038,32.0,138.0,0.99125,3.46,0.43,11.1 +6.3,0.37,0.51,6.3,0.048,35.0,146.0,0.9943,3.1,1.01,10.5 +6.4,0.23,0.37,7.9,0.05,60.0,150.0,0.99488,2.86,0.49,9.3 +5.9,0.34,0.25,2.0,0.042,12.0,110.0,0.99034,3.02,0.54,11.4 +5.0,0.33,0.23,11.8,0.03,23.0,158.0,0.99322,3.41,0.64,11.8 +5.4,0.29,0.38,1.2,0.029,31.0,132.0,0.98895,3.28,0.36,12.4 +8.0,0.33,0.35,10.0,0.035,22.0,108.0,0.99457,3.12,0.36,11.6 +6.4,0.3,0.33,5.2,0.05,30.0,137.0,0.99304,3.26,0.58,11.1 +5.4,0.29,0.38,1.2,0.029,31.0,132.0,0.98895,3.28,0.36,12.4 +6.4,0.33,0.3,7.2,0.041,42.0,168.0,0.99331,3.22,0.49,11.1 +7.0,0.33,0.78,9.9,0.042,21.0,251.0,0.99435,3.01,0.55,11.0 +6.7,0.45,0.3,5.3,0.036,27.0,165.0,0.99122,3.12,0.46,12.2 +6.5,0.36,0.31,13.55,0.053,20.0,113.0,0.99544,3.2,0.56,11.0 +5.8,0.42,0.3,2.2,0.035,26.0,129.0,0.989,3.32,0.47,12.9 +7.1,0.39,0.3,9.9,0.037,29.0,124.0,0.99414,3.07,0.42,10.9 +6.7,0.53,0.29,4.3,0.069,20.0,114.0,0.99014,3.22,0.59,13.4 +6.7,0.66,0.0,13.0,0.033,32.0,75.0,0.99551,3.15,0.5,10.7 +6.5,0.36,0.31,13.55,0.053,20.0,113.0,0.99544,3.2,0.56,11.0 +6.5,0.16,0.33,1.0,0.027,23.0,75.0,0.9908,3.3,0.39,11.4 +8.3,0.22,0.34,1.1,0.043,20.0,116.0,0.9927,3.0,0.47,10.2 +6.9,0.23,0.35,6.9,0.03,45.0,116.0,0.99244,2.8,0.54,11.0 +6.4,0.17,0.34,13.4,0.044,45.0,139.0,0.99752,3.06,0.43,9.1 +5.0,0.33,0.18,4.6,0.032,40.0,124.0,0.99114,3.18,0.4,11.0 +6.8,0.38,0.29,9.9,0.037,40.0,146.0,0.99326,3.11,0.37,11.5 +6.5,0.29,0.32,3.0,0.036,38.0,93.0,0.9906,3.16,0.59,12.0 +6.9,0.29,0.32,5.8,0.04,16.0,112.0,0.993,3.04,0.58,11.2 +6.6,0.28,0.3,12.9,0.033,31.0,177.0,0.99479,3.12,0.39,11.2 +6.2,0.36,0.27,3.2,0.032,13.0,73.0,0.98942,2.9,0.69,12.6 +6.0,0.615,0.04,0.8,0.032,8.0,50.0,0.99036,3.14,0.4,11.0 +5.9,0.44,0.36,2.5,0.03,12.0,73.0,0.99201,3.22,0.48,10.8 +5.9,0.42,0.36,2.4,0.034,19.0,77.0,0.99184,3.25,0.48,10.9 +5.8,0.34,0.21,7.2,0.041,48.0,146.0,0.99441,3.16,0.49,9.8 +5.8,0.27,0.2,7.3,0.04,42.0,145.0,0.99442,3.15,0.48,9.8 +7.1,0.33,0.18,6.3,0.094,27.0,166.0,0.99474,2.9,0.49,9.5 +6.1,0.44,0.28,4.25,0.032,43.0,132.0,0.9916,3.26,0.47,11.26666667 +7.3,0.28,0.37,1.2,0.039,26.0,99.0,0.99198,3.01,0.62,10.8 +5.2,0.5,0.18,2.0,0.036,23.0,129.0,0.98949,3.36,0.77,13.4 +6.1,0.44,0.28,4.25,0.032,43.0,132.0,0.9916,3.26,0.47,11.3 +6.4,0.62,0.12,4.7,0.06,33.0,196.0,0.99556,3.22,0.48,8.9 +6.4,0.38,0.19,4.5,0.038,36.0,119.0,0.99151,3.07,0.42,11.2 +7.5,0.305,0.38,1.4,0.047,30.0,95.0,0.99158,3.22,0.52,11.5 +6.5,0.5,0.22,4.1,0.036,35.0,131.0,0.9902,3.26,0.55,13.0 +6.6,0.4,0.3,5.3,0.038,20.0,125.0,0.99204,3.36,0.73,12.6 +6.4,0.4,0.25,4.2,0.032,15.0,91.0,0.98988,3.26,0.52,13.1 +8.3,0.49,0.23,6.65,0.034,6.0,158.0,0.99344,3.05,0.48,11.2 +6.3,0.3,0.91,8.2,0.034,50.0,199.0,0.99394,3.39,0.49,11.7 +6.1,0.19,0.37,2.6,0.041,24.0,99.0,0.99153,3.18,0.5,10.9 +6.1,0.19,0.37,2.6,0.041,24.0,99.0,0.99153,3.18,0.5,10.9 +5.6,0.24,0.34,2.0,0.041,14.0,73.0,0.98981,3.04,0.45,11.6 +5.7,0.25,0.32,12.2,0.041,43.0,127.0,0.99524,3.23,0.53,10.4 +6.6,0.21,0.39,2.3,0.041,31.0,102.0,0.99221,3.22,0.58,10.9 +6.3,0.3,0.91,8.2,0.034,50.0,199.0,0.99394,3.39,0.49,11.7 +6.2,0.28,0.41,5.0,0.043,50.0,188.0,0.99318,3.23,0.64,10.8 +5.8,0.29,0.38,10.7,0.038,49.0,136.0,0.99366,3.11,0.59,11.2 +5.8,0.345,0.15,10.8,0.033,26.0,120.0,0.99494,3.25,0.49,10.0 +6.5,0.51,0.25,1.7,0.048,39.0,177.0,0.99212,3.28,0.57,10.56666667 +6.0,0.24,0.41,1.3,0.036,42.0,118.0,0.99018,3.04,0.64,11.73333333 
+6.5,0.51,0.25,1.7,0.048,39.0,177.0,0.99212,3.28,0.57,10.6 +6.9,0.54,0.26,12.7,0.049,59.0,195.0,0.99596,3.26,0.54,10.5 +6.0,0.24,0.41,1.3,0.036,42.0,118.0,0.99018,3.04,0.64,11.75 +6.6,0.26,0.36,1.2,0.035,43.0,126.0,0.99094,3.01,0.63,11.4 +5.7,0.24,0.3,1.3,0.03,25.0,98.0,0.98968,3.37,0.43,12.4 +6.5,0.21,0.35,5.7,0.043,47.0,197.0,0.99392,3.24,0.5,10.1 +6.8,0.29,0.22,3.4,0.035,40.0,122.0,0.99024,3.09,0.47,12.3 +5.0,0.24,0.34,1.1,0.034,49.0,158.0,0.98774,3.32,0.32,13.1 +5.9,0.18,0.28,1.0,0.037,24.0,88.0,0.99094,3.29,0.55,10.65 +5.8,0.26,0.29,1.0,0.042,35.0,101.0,0.99044,3.36,0.48,11.4 +6.7,0.61,0.21,1.65,0.117,40.0,240.0,0.9938,3.11,0.57,9.3 +5.7,0.695,0.06,6.8,0.042,9.0,84.0,0.99432,3.44,0.44,10.2 +5.6,0.695,0.06,6.8,0.042,9.0,84.0,0.99432,3.44,0.44,10.2 +5.7,0.39,0.25,4.9,0.033,49.0,113.0,0.98966,3.26,0.58,13.1 +6.1,0.38,0.47,1.4,0.051,59.0,210.0,0.99309,3.24,0.5,9.6 +6.3,0.36,0.28,2.5,0.035,18.0,73.0,0.98868,3.1,0.47,12.8 +6.0,0.29,0.41,10.8,0.048,55.0,149.0,0.9937,3.09,0.59,10.96666667 +6.0,0.29,0.41,10.8,0.048,55.0,149.0,0.9937,3.09,0.59,10.96666667 +6.0,0.29,0.41,10.8,0.048,55.0,149.0,0.9937,3.09,0.59,10.96666667 +6.0,0.29,0.41,10.8,0.048,55.0,149.0,0.9937,3.09,0.59,11.0 +7.1,0.43,0.25,2.8,0.036,43.0,132.0,0.98975,3.21,0.47,13.4 +6.6,0.25,0.25,1.3,0.04,28.0,85.0,0.98984,2.87,0.48,11.2 +6.6,0.33,0.41,2.0,0.027,14.0,79.0,0.99063,3.27,0.63,12.4 +8.0,0.23,0.41,1.1,0.048,35.0,150.0,0.99168,3.09,0.47,11.2 +7.3,0.17,0.36,8.2,0.028,44.0,111.0,0.99272,3.14,0.41,12.4 +6.0,0.17,0.33,6.0,0.036,30.0,111.0,0.99362,3.32,0.58,10.13333333 +6.1,0.16,0.34,6.1,0.034,31.0,114.0,0.99365,3.32,0.58,10.13333333 +7.3,0.17,0.36,8.2,0.028,44.0,111.0,0.99272,3.14,0.41,12.4 +6.4,0.31,0.53,8.8,0.057,36.0,221.0,0.99642,3.17,0.44,9.1 +6.1,0.16,0.34,6.1,0.034,31.0,114.0,0.99365,3.32,0.58,10.15 +6.0,0.17,0.33,6.0,0.036,30.0,111.0,0.99362,3.32,0.58,10.15 +5.9,0.44,0.33,1.2,0.049,12.0,117.0,0.99134,3.46,0.44,11.5 +6.6,0.285,0.49,11.4,0.035,57.0,137.0,0.99732,3.08,0.54,8.9 +4.9,0.335,0.14,1.3,0.036,69.0,168.0,0.99212,3.47,0.46,10.46666667 +4.9,0.335,0.14,1.3,0.036,69.0,168.0,0.99212,3.47,0.46,10.46666667 +6.0,0.28,0.52,6.2,0.028,37.0,104.0,0.99161,3.28,0.51,11.8 +5.8,0.35,0.29,3.2,0.034,41.0,151.0,0.9912,3.35,0.58,11.63333333 +5.7,0.21,0.37,4.5,0.04,58.0,140.0,0.99332,3.29,0.62,10.6 +6.5,0.25,0.32,9.9,0.045,41.0,128.0,0.99636,3.18,0.52,9.6 +6.0,0.28,0.52,6.2,0.028,37.0,104.0,0.99161,3.28,0.51,11.8 +6.6,0.285,0.49,11.4,0.035,57.0,137.0,0.99732,3.08,0.54,8.9 +4.7,0.335,0.14,1.3,0.036,69.0,168.0,0.99212,3.47,0.46,10.5 +6.8,0.63,0.04,1.3,0.058,25.0,133.0,0.99271,3.17,0.39,10.2 +5.6,0.27,0.37,0.9,0.025,11.0,49.0,0.98845,3.29,0.33,13.1 +6.8,0.32,0.33,0.7,0.027,15.0,66.0,0.9899,3.11,0.31,11.8 +6.5,0.33,0.32,1.0,0.041,39.0,120.0,0.99004,3.06,0.37,12.2 +6.0,0.24,0.34,1.0,0.036,52.0,184.0,0.99097,3.44,0.44,11.45 +7.2,0.26,0.32,10.4,0.062,23.0,114.0,0.9966,3.23,0.49,10.5 +6.8,0.63,0.04,1.3,0.058,25.0,133.0,0.99271,3.17,0.39,10.2 +6.7,0.16,0.32,12.5,0.035,18.0,156.0,0.99666,2.88,0.36,9.0 +6.7,0.16,0.32,12.5,0.035,18.0,156.0,0.99666,2.88,0.36,9.0 +6.7,0.16,0.32,12.5,0.035,18.0,156.0,0.99666,2.88,0.36,9.0 +6.7,0.16,0.32,12.5,0.035,18.0,156.0,0.99666,2.88,0.36,9.0 +6.9,0.19,0.31,19.25,0.043,38.0,167.0,0.99954,2.93,0.52,9.1 +6.0,0.36,0.32,1.1,0.053,26.0,173.0,0.99414,3.38,0.54,8.8 +6.7,0.16,0.32,12.5,0.035,18.0,156.0,0.99666,2.88,0.36,9.0 +6.9,0.19,0.31,19.25,0.043,38.0,167.0,0.99954,2.93,0.52,9.1 +6.7,0.35,0.32,9.0,0.032,29.0,113.0,0.99188,3.13,0.65,12.9 +6.1,0.15,0.4,1.2,0.03,19.0,84.0,0.98926,3.19,0.96,13.0 
+6.7,0.35,0.32,9.0,0.032,29.0,113.0,0.99188,3.13,0.65,12.9 +7.0,0.27,0.74,1.3,0.173,34.0,121.0,0.99334,3.04,0.46,9.2 +6.8,0.3,0.33,12.8,0.041,60.0,168.0,0.99659,3.1,0.56,9.8 +6.8,0.3,0.33,12.8,0.041,60.0,168.0,0.99659,3.1,0.56,9.8 +6.4,0.69,0.09,7.6,0.044,34.0,144.0,0.9948,3.26,0.38,10.1 +6.4,0.69,0.09,7.6,0.044,34.0,144.0,0.9948,3.26,0.38,10.1 +5.9,0.12,0.28,1.4,0.037,36.0,83.0,0.99074,3.33,0.42,10.9 +6.3,0.36,0.5,8.3,0.053,51.0,202.0,0.9955,3.2,0.51,9.6 +5.7,0.27,0.16,9.0,0.053,32.0,111.0,0.99474,3.36,0.37,10.4 +6.1,0.22,0.4,1.85,0.031,25.0,111.0,0.98966,3.03,0.3,11.8 +5.6,0.205,0.16,12.55,0.051,31.0,115.0,0.99564,3.4,0.38,10.8 +7.2,0.33,0.28,1.4,0.034,26.0,109.0,0.99246,3.28,0.57,10.6 +5.9,0.21,0.31,1.8,0.033,45.0,142.0,0.98984,3.35,0.5,12.7 +5.4,0.33,0.31,4.0,0.03,27.0,108.0,0.99031,3.3,0.43,12.2 +5.4,0.205,0.16,12.55,0.051,31.0,115.0,0.99564,3.4,0.38,10.8 +5.7,0.27,0.16,9.0,0.053,32.0,111.0,0.99474,3.36,0.37,10.4 +6.4,0.28,0.28,3.0,0.04,19.0,98.0,0.99216,3.25,0.47,11.1 +6.1,0.22,0.4,1.85,0.031,25.0,111.0,0.98966,3.03,0.3,11.8 +6.7,0.15,0.32,7.9,0.034,17.0,81.0,0.99512,3.29,0.31,10.0 +5.5,0.315,0.38,2.6,0.033,10.0,69.0,0.9909,3.12,0.59,10.8 +4.8,0.225,0.38,1.2,0.074,47.0,130.0,0.99132,3.31,0.4,10.3 +5.2,0.24,0.15,7.1,0.043,32.0,134.0,0.99378,3.24,0.48,9.9 +6.7,0.15,0.32,7.9,0.034,17.0,81.0,0.99512,3.29,0.31,10.0 +6.6,0.27,0.32,1.3,0.044,18.0,93.0,0.99044,3.11,0.56,12.25 +6.1,0.32,0.33,10.7,0.036,27.0,98.0,0.99521,3.34,0.52,10.2 +6.0,0.25,0.28,7.7,0.053,37.0,132.0,0.99489,3.06,0.5,9.4 +6.4,0.42,0.46,8.4,0.05,58.0,180.0,0.99495,3.18,0.46,9.7 +6.1,0.32,0.33,10.7,0.036,27.0,98.0,0.99521,3.34,0.52,10.2 +6.9,0.31,0.33,12.7,0.038,33.0,116.0,0.9954,3.04,0.65,10.4 +6.3,0.48,0.48,1.8,0.035,35.0,96.0,0.99121,3.49,0.74,12.2 +6.0,0.25,0.28,7.7,0.053,37.0,132.0,0.99489,3.06,0.5,9.4 +7.2,0.21,0.31,10.5,0.035,36.0,122.0,0.99478,3.12,0.4,10.6 +6.8,0.32,0.43,1.6,0.05,4.0,65.0,0.99346,3.27,0.47,10.7 +7.9,0.3,0.6,1.85,0.048,13.0,106.0,0.99331,3.24,0.49,11.85 +5.3,0.31,0.38,10.5,0.031,53.0,140.0,0.99321,3.34,0.46,11.7 +5.3,0.31,0.38,10.5,0.031,53.0,140.0,0.99321,3.34,0.46,11.7 +5.2,0.185,0.22,1.0,0.03,47.0,123.0,0.99218,3.55,0.44,10.15 +5.5,0.16,0.31,1.2,0.026,31.0,68.0,0.9898,3.33,0.44,11.63333333 +6.0,0.17,0.36,1.7,0.042,14.0,61.0,0.99144,3.22,0.54,10.8 +6.0,0.16,0.36,1.6,0.042,13.0,61.0,0.99143,3.22,0.54,10.8 +6.1,0.24,0.32,9.0,0.031,41.0,134.0,0.99234,3.25,0.26,12.3 +5.5,0.3,0.25,1.9,0.029,33.0,118.0,0.98972,3.36,0.66,12.5 +5.5,0.16,0.31,1.2,0.026,31.0,68.0,0.9898,3.33,0.44,11.65 +6.0,0.32,0.46,1.5,0.05,56.0,189.0,0.99308,3.24,0.49,9.6 +6.1,0.27,0.31,3.9,0.034,42.0,137.0,0.99218,3.24,0.46,10.9 +6.0,0.27,0.32,3.6,0.035,36.0,133.0,0.99215,3.23,0.46,10.8 +6.0,0.14,0.37,1.2,0.032,63.0,148.0,0.99185,3.32,0.44,11.2 +5.0,0.24,0.19,5.0,0.043,17.0,101.0,0.99438,3.67,0.57,10.0 +6.1,0.26,0.25,2.9,0.047,289.0,440.0,0.99314,3.44,0.64,10.5 +6.3,0.23,0.5,10.4,0.043,61.0,132.0,0.99542,2.86,0.46,9.1 +5.6,0.26,0.5,11.4,0.029,25.0,93.0,0.99428,3.23,0.49,10.5 +6.1,0.34,0.24,18.35,0.05,33.0,184.0,0.99943,3.12,0.61,9.3 +6.2,0.35,0.25,18.4,0.051,28.0,182.0,0.99946,3.13,0.62,9.3 +6.0,0.14,0.37,1.2,0.032,63.0,148.0,0.99185,3.32,0.44,11.2 +7.3,0.36,0.62,7.1,0.033,48.0,185.0,0.99472,3.14,0.62,10.6 +5.1,0.25,0.36,1.3,0.035,40.0,78.0,0.9891,3.23,0.64,12.1 +5.5,0.16,0.26,1.5,0.032,35.0,100.0,0.99076,3.43,0.77,12.0 +6.4,0.19,0.35,10.2,0.043,40.0,106.0,0.99632,3.16,0.5,9.7 +6.6,0.29,0.73,2.2,0.027,21.0,92.0,0.99,3.12,0.48,12.4 +6.0,0.38,0.26,3.5,0.035,38.0,111.0,0.98872,3.18,0.47,13.6 
+6.0,0.38,0.26,3.5,0.035,38.0,111.0,0.98872,3.18,0.47,13.6 +6.5,0.2,0.35,3.9,0.04,27.0,140.0,0.99102,2.98,0.53,11.8 +6.6,0.17,0.26,7.4,0.052,45.0,128.0,0.99388,3.16,0.37,10.0 +6.6,0.17,0.26,7.4,0.052,45.0,128.0,0.99388,3.16,0.37,10.0 +6.2,0.15,0.27,11.0,0.035,46.0,116.0,0.99602,3.12,0.38,9.1 +5.9,0.48,0.3,1.5,0.037,19.0,78.0,0.99057,3.47,0.42,11.9 +5.3,0.4,0.25,3.9,0.031,45.0,130.0,0.99072,3.31,0.58,11.75 +5.9,0.26,0.29,5.4,0.046,34.0,116.0,0.99224,3.24,0.41,11.4 +5.2,0.3,0.34,1.5,0.038,18.0,96.0,0.98942,3.56,0.48,13.0 +6.4,0.32,0.25,5.0,0.055,28.0,138.0,0.99171,3.27,0.5,12.4 +6.6,0.19,0.25,1.2,0.052,34.0,181.0,0.99352,3.3,0.42,9.4 +6.8,0.27,0.3,13.0,0.047,69.0,160.0,0.99705,3.16,0.5,9.6 +6.8,0.27,0.3,13.0,0.047,69.0,160.0,0.99705,3.16,0.5,9.6 +6.8,0.27,0.3,13.0,0.047,69.0,160.0,0.99705,3.16,0.5,9.6 +6.8,0.27,0.3,13.0,0.047,69.0,160.0,0.99705,3.16,0.5,9.6 +6.4,0.28,0.45,8.6,0.057,47.0,223.0,0.99654,3.16,0.51,9.1 +5.2,0.21,0.31,1.7,0.048,17.0,61.0,0.98953,3.24,0.37,12.0 +7.1,0.24,0.34,1.2,0.045,6.0,132.0,0.99132,3.16,0.46,11.2 +5.0,0.27,0.4,1.2,0.076,42.0,124.0,0.99204,3.32,0.47,10.1 +5.8,0.27,0.4,1.2,0.076,47.0,130.0,0.99185,3.13,0.45,10.3 +5.9,0.27,0.32,2.0,0.034,31.0,102.0,0.98952,3.16,0.56,12.3 +5.8,0.315,0.19,19.4,0.031,28.0,106.0,0.99704,2.97,0.4,10.55 +6.0,0.59,0.0,0.8,0.037,30.0,95.0,0.99032,3.1,0.4,10.9 +5.8,0.3,0.09,6.3,0.042,36.0,138.0,0.99382,3.15,0.48,9.7 +5.6,0.3,0.1,6.4,0.043,34.0,142.0,0.99382,3.14,0.48,9.8 +6.7,0.3,0.5,12.1,0.045,38.0,127.0,0.9974,3.04,0.53,8.9 +6.7,0.3,0.5,12.1,0.045,38.0,127.0,0.9974,3.04,0.53,8.9 +6.4,0.31,0.31,12.9,0.045,55.0,161.0,0.99546,3.02,0.59,10.2 +6.9,0.25,0.29,2.4,0.038,28.0,76.0,0.99088,3.01,0.36,11.7 +4.4,0.32,0.39,4.3,0.03,31.0,127.0,0.98904,3.46,0.36,12.8 +3.9,0.225,0.4,4.2,0.03,29.0,118.0,0.989,3.57,0.36,12.8 +6.4,0.31,0.31,12.9,0.045,55.0,161.0,0.99546,3.02,0.59,10.2 +5.5,0.62,0.33,1.7,0.037,24.0,118.0,0.98758,3.15,0.39,13.55 +6.2,0.3,0.42,2.2,0.036,28.0,128.0,0.9901,3.13,0.38,11.6 +6.7,0.3,0.5,12.1,0.045,38.0,127.0,0.9974,3.04,0.53,8.9 +4.7,0.785,0.0,3.4,0.036,23.0,134.0,0.98981,3.53,0.92,13.8 +6.0,0.31,0.32,7.4,0.175,47.0,159.0,0.9952,3.19,0.5,9.4 +6.0,0.32,0.3,7.3,0.174,46.0,159.0,0.99519,3.18,0.49,9.4 +6.4,0.105,0.29,1.1,0.035,44.0,140.0,0.99142,3.17,0.55,10.7 +6.4,0.105,0.29,1.1,0.035,44.0,140.0,0.99142,3.17,0.55,10.7 +5.7,0.33,0.32,1.4,0.043,28.0,93.0,0.9897,3.31,0.5,12.3 +5.9,0.32,0.19,14.5,0.042,37.0,115.0,0.99684,3.16,0.43,10.3 +6.2,0.26,0.2,8.0,0.047,35.0,111.0,0.99445,3.11,0.42,10.4 +6.0,0.2,0.33,1.1,0.039,45.0,126.0,0.99051,3.31,0.45,11.6 +6.4,0.105,0.29,1.1,0.035,44.0,140.0,0.99142,3.17,0.55,10.7 +5.8,0.28,0.34,2.2,0.037,24.0,125.0,0.98986,3.36,0.33,12.8 +6.4,0.31,0.5,5.8,0.038,42.0,111.0,0.99189,3.18,0.53,11.9 +6.0,0.35,0.46,0.9,0.033,9.0,65.0,0.98934,3.24,0.35,12.1 +5.1,0.26,0.34,6.4,0.034,26.0,99.0,0.99449,3.23,0.41,9.2 +6.6,0.28,0.09,10.9,0.051,37.0,131.0,0.99566,2.93,0.62,9.5 +6.0,0.17,0.3,7.3,0.039,39.0,104.0,0.99252,2.91,0.57,11.0 +7.3,0.35,0.67,8.3,0.053,10.0,100.0,0.9959,3.19,0.5,10.9 +6.0,0.26,0.24,1.3,0.053,66.0,150.0,0.9924,3.21,0.62,10.4 +5.4,0.375,0.4,3.3,0.054,29.0,147.0,0.99482,3.42,0.52,9.1 +7.0,0.17,0.42,1.0,0.075,19.0,71.0,0.99103,3.32,0.62,11.4 +5.1,0.26,0.33,1.1,0.027,46.0,113.0,0.98946,3.35,0.43,11.4 +5.8,0.36,0.5,1.0,0.127,63.0,178.0,0.99212,3.1,0.45,9.7 +5.7,0.4,0.35,5.1,0.026,17.0,113.0,0.99052,3.18,0.67,12.4 +6.2,0.76,0.01,3.2,0.041,18.0,120.0,0.99026,3.2,0.94,13.7 +6.1,0.41,0.2,12.6,0.032,54.0,136.0,0.99516,2.91,0.43,10.6 +5.8,0.385,0.25,3.7,0.031,38.0,122.0,0.99128,3.2,0.63,11.2 
+6.0,0.27,0.4,1.7,0.021,18.0,82.0,0.9891,3.24,0.95,13.13333333 +5.7,0.4,0.35,5.1,0.026,17.0,113.0,0.99052,3.18,0.67,12.4 +5.8,0.36,0.5,1.0,0.127,63.0,178.0,0.99212,3.1,0.45,9.7 +7.0,0.24,0.47,1.3,0.043,18.0,131.0,0.99176,3.19,0.45,11.0 +6.8,0.23,0.48,1.5,0.036,35.0,165.0,0.99162,3.18,0.45,11.3 +6.5,0.28,0.34,4.6,0.054,22.0,130.0,0.99193,3.2,0.46,12.0 +6.4,0.23,0.35,10.3,0.042,54.0,140.0,0.9967,3.23,0.47,9.2 +6.0,0.34,0.29,6.1,0.046,29.0,134.0,0.99462,3.48,0.57,10.7 +6.0,0.34,0.29,6.1,0.046,29.0,134.0,0.99462,3.48,0.57,10.7 +6.7,0.22,0.33,1.2,0.036,36.0,86.0,0.99058,3.1,0.76,11.4 +6.4,0.23,0.35,10.3,0.042,54.0,140.0,0.9967,3.23,0.47,9.2 +6.0,0.32,0.33,9.9,0.032,22.0,90.0,0.99258,3.1,0.43,12.1 +5.8,0.29,0.27,1.6,0.062,17.0,140.0,0.99138,3.23,0.35,11.1 +5.8,0.38,0.26,1.1,0.058,20.0,140.0,0.99271,3.27,0.43,9.7 +5.9,0.32,0.26,1.5,0.057,17.0,141.0,0.9917,3.24,0.36,10.7 +5.6,0.33,0.28,1.2,0.031,33.0,97.0,0.99126,3.49,0.58,10.9 +5.9,0.37,0.3,1.5,0.033,35.0,95.0,0.98986,3.36,0.56,12.0 +5.6,0.295,0.26,1.1,0.035,40.0,102.0,0.99154,3.47,0.56,10.6 +6.7,0.5,0.36,11.5,0.096,18.0,92.0,0.99642,3.11,0.49,9.6 +6.5,0.28,0.38,7.8,0.031,54.0,216.0,0.99154,3.03,0.42,13.1 +5.3,0.275,0.24,7.4,0.038,28.0,114.0,0.99313,3.38,0.51,11.0 +5.2,0.405,0.15,1.45,0.038,10.0,44.0,0.99125,3.52,0.4,11.6 +6.8,0.34,0.36,8.9,0.029,44.0,128.0,0.99318,3.28,0.35,11.95 +5.7,0.22,0.25,1.1,0.05,97.0,175.0,0.99099,3.44,0.62,11.1 +6.2,0.28,0.57,1.0,0.043,50.0,92.0,0.99004,3.17,0.36,11.5 +5.6,0.34,0.25,2.5,0.046,47.0,182.0,0.99093,3.21,0.4,11.3 +4.8,0.29,0.23,1.1,0.044,38.0,180.0,0.98924,3.28,0.34,11.9 +6.6,0.38,0.29,2.4,0.136,15.0,93.0,0.99336,3.18,0.6,9.5 +5.1,0.3,0.3,2.3,0.048,40.0,150.0,0.98944,3.29,0.46,12.2 +4.4,0.54,0.09,5.1,0.038,52.0,97.0,0.99022,3.41,0.4,12.2 +7.0,0.36,0.35,2.5,0.048,67.0,161.0,0.99146,3.05,0.56,11.1 +6.4,0.33,0.44,8.9,0.055,52.0,164.0,0.99488,3.1,0.48,9.6 +7.0,0.36,0.35,2.5,0.048,67.0,161.0,0.99146,3.05,0.56,11.1 +6.4,0.33,0.44,8.9,0.055,52.0,164.0,0.99488,3.1,0.48,9.6 +6.2,0.23,0.38,1.6,0.044,12.0,113.0,0.99176,3.3,0.73,11.4 +5.2,0.25,0.23,1.4,0.047,20.0,77.0,0.99001,3.32,0.62,11.4 +6.2,0.35,0.29,3.9,0.041,22.0,79.0,0.99005,3.1,0.59,12.06666667 +7.1,0.23,0.39,13.7,0.058,26.0,172.0,0.99755,2.9,0.46,9.0 +7.1,0.23,0.39,13.7,0.058,26.0,172.0,0.99755,2.9,0.46,9.0 +7.5,0.38,0.33,9.2,0.043,19.0,116.0,0.99444,3.08,0.42,11.4 +6.4,0.35,0.51,7.8,0.055,53.0,177.0,0.99502,3.12,0.45,9.6 +6.0,0.43,0.34,7.6,0.045,25.0,118.0,0.99222,3.03,0.37,11.0 +6.0,0.52,0.33,7.7,0.046,24.0,119.0,0.99224,3.04,0.38,11.0 +5.5,0.31,0.29,3.0,0.027,16.0,102.0,0.99067,3.23,0.56,11.2 +5.9,0.22,0.3,1.3,0.052,42.0,86.0,0.99069,3.31,0.47,11.55 +6.2,0.36,0.32,4.0,0.036,44.0,92.0,0.98936,3.2,0.5,13.3 +6.0,0.41,0.23,1.1,0.066,22.0,148.0,0.99266,3.3,0.47,9.633333333 +6.2,0.355,0.35,2.0,0.046,31.0,95.0,0.98822,3.06,0.46,13.6 +5.7,0.41,0.21,1.9,0.048,30.0,112.0,0.99138,3.29,0.55,11.2 +5.3,0.6,0.34,1.4,0.031,3.0,60.0,0.98854,3.27,0.38,13.0 +5.8,0.23,0.31,4.5,0.046,42.0,124.0,0.99324,3.31,0.64,10.8 +6.6,0.24,0.33,10.1,0.032,8.0,81.0,0.99626,3.19,0.51,9.8 +6.1,0.32,0.28,6.6,0.021,29.0,132.0,0.99188,3.15,0.36,11.45 +5.0,0.2,0.4,1.9,0.015,20.0,98.0,0.9897,3.37,0.55,12.05 +6.0,0.42,0.41,12.4,0.032,50.0,179.0,0.99622,3.14,0.6,9.7 +5.7,0.21,0.32,1.6,0.03,33.0,122.0,0.99044,3.33,0.52,11.9 +5.6,0.2,0.36,2.5,0.048,16.0,125.0,0.99282,3.49,0.49,10.0 +7.4,0.22,0.26,1.2,0.035,18.0,97.0,0.99245,3.12,0.41,9.7 +6.2,0.38,0.42,2.5,0.038,34.0,117.0,0.99132,3.36,0.59,11.6 +5.9,0.54,0.0,0.8,0.032,12.0,82.0,0.99286,3.25,0.36,8.8 
+6.2,0.53,0.02,0.9,0.035,6.0,81.0,0.99234,3.24,0.35,9.5 +6.6,0.34,0.4,8.1,0.046,68.0,170.0,0.99494,3.15,0.5,9.533333333 +6.6,0.34,0.4,8.1,0.046,68.0,170.0,0.99494,3.15,0.5,9.533333333 +5.0,0.235,0.27,11.75,0.03,34.0,118.0,0.9954,3.07,0.5,9.4 +5.5,0.32,0.13,1.3,0.037,45.0,156.0,0.99184,3.26,0.38,10.7 +4.9,0.47,0.17,1.9,0.035,60.0,148.0,0.98964,3.27,0.35,11.5 +6.5,0.33,0.38,8.3,0.048,68.0,174.0,0.99492,3.14,0.5,9.6 +6.6,0.34,0.4,8.1,0.046,68.0,170.0,0.99494,3.15,0.5,9.55 +6.2,0.21,0.28,5.7,0.028,45.0,121.0,0.99168,3.21,1.08,12.15 +6.2,0.41,0.22,1.9,0.023,5.0,56.0,0.98928,3.04,0.79,13.0 +6.8,0.22,0.36,1.2,0.052,38.0,127.0,0.9933,3.04,0.54,9.2 +4.9,0.235,0.27,11.75,0.03,34.0,118.0,0.9954,3.07,0.5,9.4 +6.1,0.34,0.29,2.2,0.036,25.0,100.0,0.98938,3.06,0.44,11.8 +5.7,0.21,0.32,0.9,0.038,38.0,121.0,0.99074,3.24,0.46,10.6 +6.5,0.23,0.38,1.3,0.032,29.0,112.0,0.99298,3.29,0.54,9.7 +6.2,0.21,0.29,1.6,0.039,24.0,92.0,0.99114,3.27,0.5,11.2 +6.6,0.32,0.36,8.0,0.047,57.0,168.0,0.9949,3.15,0.46,9.6 +6.5,0.24,0.19,1.2,0.041,30.0,111.0,0.99254,2.99,0.46,9.4 +5.5,0.29,0.3,1.1,0.022,20.0,110.0,0.98869,3.34,0.38,12.8 +6.0,0.21,0.38,0.8,0.02,22.0,98.0,0.98941,3.26,0.32,11.8 diff --git a/data/wine_quality/resnet_model.pth b/data/wine_quality/resnet_model.pth new file mode 100644 index 0000000..d9bce75 Binary files /dev/null and b/data/wine_quality/resnet_model.pth differ diff --git a/data/wine_quality/xgb_model.json b/data/wine_quality/xgb_model.json new file mode 100644 index 0000000..1dd86a5 --- /dev/null +++ b/data/wine_quality/xgb_model.json @@ -0,0 +1 @@ +{"learner":{"attributes":{"scikit_learn":"{\"_estimator_type\": \"classifier\"}"},"feature_names":[],"feature_types":[],"gradient_booster":{"model":{"cats":{"enc":[],"feature_segments":[],"sorted_idx":[]},"gbtree_model_param":{"num_parallel_tree":"1","num_trees":"100"},"iteration_indptr":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100],"tree_info":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"trees":[{"base_weights":[-7.575618E-8,-6.815155E-1,1.2107216E0,3.0258325E-1,-8.8606197E-1,5.7677656E-1,1.8583015E0,-2.9965278E-2,1.9277803E0,-9.91891E-1,-2.9659963E-1,-4.3066958E-1,9.724098E-1,1.1031969E0,2.27895E0,-4.43183E-1,6.274025E-1,3.7971673E0,-3.327744E-1,-7.144528E-1,-1.0915556E0,-4.1987383E-1,1.1990649E0,-7.408951E-1,4.354897E-1,6.2080085E-1,1.5028325E0,4.1818157E-1,2.417075E0,1.1307507E0,2.6802728E0,-1.0718073E0,-2.0210224E-1,1.4079916E0,-8.716934E-2,3.5314137E-1,1.265983E0,-2.9272592E-1,4.3436742E-1,-9.7858036E-1,-4.0405825E-1,-1.1665559E0,-8.229003E-1,-7.936303E-1,2.3078002E-1,7.318631E-1,-2.8114828E-1,2.738935E-1,-8.741516E-1,1.5365608E0,-4.402317E-2,7.957706E-1,-5.663981E-1,1.8424253E0,-3.907813E-1,2.5119467E0,1.9082513E-1,-3.7056312E-2,2.71678E0,8.006491E-1,8.6817044E-1,3.1196244E0,1.4089954E0,-9.519319E-2,-3.410982E-1,5.6676537E-2,-2.7417532E-1,1.5838951E-1,6.1751163E-1,-2.4027723E-1,1.2824085E-1,3.5314137E-1,-1.272856E-1,-3.2641402E-1,-7.397277E-2,-2.3294312E-1,1.093029E-1,-3.540361E-1,-2.767441E-2,-2.995557E-1,-3.3856723E-2,-2.6818296E-1,1.3520648E-1,5.0959116E-1,-1.6655535E-1,-2.0836172E-1,8.9040995E-2,-2.8788832E-1,1.52662E-1,5.8582294E-1,8.9040995E-2,-2
.7616066E-1,1.4362375E-1,1.304555E-1,4.1877818E-1,-2.8860706E-1,3.2193005E-1,2.686055E-1,7.5027585E-1,-3.0825803E-1,3.5314137E-1,2.3155595E-1,8.9622754E-1,1.446244E-1,-2.508521E-1,8.674211E-1,1.52662E-1,4.1570476E-1,-1.152172E-1,9.656104E-1,-1.9598895E-1,5.053912E-1,-4.1879423E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":0,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,41,43,45,47,49,51,53,55,57,59,61,63,65,67,69,-1,-1,-1,71,73,75,77,79,81,83,-1,85,-1,87,89,91,93,95,97,99,101,103,-1,105,107,-1,109,111,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[6.7859546E2,1.0619113E2,1.2052896E2,4.9400246E1,2.6899017E1,6.0655197E1,4.388449E1,2.1132723E1,6.686322E1,9.427032E0,1.2999803E1,1.1959535E1,1.9362228E1,4.777056E1,3.97666E1,7.01645E0,1.6882458E1,4.533638E0,4.3432355E0,7.840172E0,4.4739685E0,1.5590436E1,1.0638239E1,7.90716E0,6.4299984E0,1.439827E1,2.816304E1,1.7055227E1,1.4319191E1,1.2690071E1,3.363626E1,2.7848148E-1,1.0117609E1,7.165634E0,6.450432E0,0E0,0E0,0E0,3.2515514E0,4.024147E0,1.3986116E1,2.9284668E0,7.649811E0,5.4538803E0,2.850474E1,0E0,1.010811E0,0E0,4.0776577E0,1.3210669E0,4.732672E0,1.2158936E1,7.00548E0,2.0437286E1,8.51211E0,6.113205E-1,1.066564E1,0E0,4.8789062E0,1.6543205E1,0E0,2.0710938E1,8.478695E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,33,33,34,34,38,38,39,39,40,40,41,41,42,42,43,43,44,44,46,46,48,48,49,49,50,50,51,51,52,52,53,53,54,54,55,55,56,56,58,58,59,59,61,61,62,62],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,-1,-1,-1,72,74,76,78,80,82,84,-1,86,-1,88,90,92,94,96,98,100,102,104,-1,106,108,-1,110,112,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[3.4227094E-1,-8.1802267E-1,1.0549875E0,1.0683435E0,6.8749217E-3,-2.6587364E-1,-4.803529E-1,-1.6082309E-1,-5.9413753E-2,-4.5355532E-1,2.0709887E0,1.9577834E-1,1.9592093E-1,7.3075134E-1,-7.657978E-1,-2.6587364E-1,-4.0061373E-1,-1.4369774E-1,1.6019539E0,-2.432298E-1,-4.1237012E-1,6.313125E-1,9.3145706E-2,-8.4839493E-1,-6.86076E-1,7.47652E-1,5.414116E-1,-1.2854533E0,-9.236573E-1,7.8739315E-1,-4.5770618E-1,-8.498767E-1,5.1763475E-1,-7.8681755E-1,-1.1411514E-1,3.5314137E-1,1.265983E0,-2.9272592E-1,9.245883E-1,8.8010764E-1,1.6094304E0,3.382379E0,7.9795814E-1,1.2684059E0,-3.3206618E-1,7.318631E-1,3.3052385E0,2.738935E-1,9.157797E-1,5.691137E-1,-8.2178444E-1,-5.491631E-1,1.5480409E0,-7.237584E-1,-5.7662034E-1,1.8934776E0,8.8282496E-1,-3.7056312E-2,9.9497205E-1,-1.519667E0,8.6817044E-1,8.367388E-1,6.1836034E-1,-9.519319E-2,-3.410982E-1,5.6676537E-2,-2.7417532E-1,1.5838951E-1,6.1751163E-1,-2.4027723E-1,1.2824085E-1,3.5314137E-1,-1.272856E-1,-3.2641402E-1,-7.397277E-2
,-2.3294312E-1,1.093029E-1,-3.540361E-1,-2.767441E-2,-2.995557E-1,-3.3856723E-2,-2.6818296E-1,1.3520648E-1,5.0959116E-1,-1.6655535E-1,-2.0836172E-1,8.9040995E-2,-2.8788832E-1,1.52662E-1,5.8582294E-1,8.9040995E-2,-2.7616066E-1,1.4362375E-1,1.304555E-1,4.1877818E-1,-2.8860706E-1,3.2193005E-1,2.686055E-1,7.5027585E-1,-3.0825803E-1,3.5314137E-1,2.3155595E-1,8.9622754E-1,1.446244E-1,-2.508521E-1,8.674211E-1,1.52662E-1,4.1570476E-1,-1.152172E-1,9.656104E-1,-1.9598895E-1,5.053912E-1,-4.1879423E-2],"split_indices":[10,1,10,7,10,2,5,10,2,1,0,5,8,9,3,2,4,4,7,0,10,8,1,1,4,6,4,9,8,5,4,3,6,3,5,0,0,0,6,8,3,0,9,9,1,0,0,0,3,8,0,7,5,3,3,10,1,0,7,7,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[8.204148E2,5.252107E2,2.952041E2,9.029772E1,4.34913E2,1.5012785E2,1.4507623E2,7.5774315E1,1.452341E1,3.6813687E2,6.6776115E1,4.2307327E1,1.0782053E2,5.3199883E1,9.187636E1,4.6727493E1,2.904682E1,7.5774317E0,6.9459786E0,9.945379E1,2.6868307E2,6.2355946E1,4.4201684E0,3.1256905E1,1.1050421E1,6.614466E1,4.1675873E1,3.5834938E1,1.7364946E1,2.4784515E1,6.709184E1,1.19976E1,3.4729893E1,1.3418368E1,1.5628452E1,1.5786315E0,5.9988E0,3.6308525E0,3.3151262E0,5.256843E1,4.6885357E1,2.0743219E2,6.1250904E1,3.946579E1,2.2890158E1,2.052221E0,2.3679473E0,1.8943579E0,2.9362547E1,2.6836736E0,8.366747E0,5.793578E1,8.208884E0,3.5361347E1,6.314526E0,2.5258105E0,3.3309124E1,1.8943579E0,1.547059E1,2.2100842E1,2.6836736E0,4.8779716E1,1.8312126E1,1.4207684E0,1.0576832E1,2.2890158E1,1.1839737E1,6.472389E0,6.9459786E0,6.314526E0,9.313926E0,1.5786315E0,1.7364947E0,4.5306725E1,7.261705E0,3.1572632E1,1.5312726E1,2.047485E2,2.6836736E0,4.8779716E1,1.24711895E1,3.6782116E1,2.6836736E0,7.5774317E0,1.5312726E1,1.2629052E0,1.1050421E0,2.7941778E1,1.4207684E0,1.5786315E0,1.1050421E0,2.8415368E0,5.5252104E0,3.7255703E1,2.0680073E1,6.9459786E0,1.2629052E0,1.547059E1,1.9890757E1,4.7358947E0,1.5786315E0,1.1050421E0,1.4207684E0,2.6363148E1,6.9459786E0,1.4049821E1,1.4207684E0,1.4681273E1,7.419568E0,4.767467E1,1.1050421E0,1.547059E1,2.8415368E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"113","size_leaf_vector":"1"}},{"base_weights":[-4.2900924E-2,-6.4209104E-1,6.054821E-1,7.222129E-2,-8.508806E-1,4.431229E-1,1.3265942E0,-2.4230647E-1,6.9886976E-1,-9.796455E-1,-5.683365E-1,-5.3798044E-1,5.779344E-1,1.5067829E0,1.9948387E-1,-5.263523E-1,6.9302744E-1,1.4679376E0,-2.1616076E-1,-1.1012281E0,-4.927311E-1,-6.9075745E-1,-1.3663335E-1,5.072461E-1,-7.088711E-1,8.751316E-2,6.9272554E-1,-3.4696355E-1,1.5939506E0,-8.628777E-1,1.0217881E0,2.858268E-1,-5.873692E-1,1.5500419E0,-6.184563E-1,-6.2764686E-1,1.8377943E0,-5.849824E-1,1.0519383E0,-1.1161706E0,-1.5105043E-1,1.1184083E-1,-3.3057302E-1,-3.627521E-1,-1.0317048E0,4.7072807E-1,-7.970467E-1,4.8174965E-1,-1.9247495E-1,7.219164E-2,-8.021752E-1,1.6403742E0,-1.2663408E-1,4.5120254E-1,1.1199576E0,9.682874E-2,-2.2891994E-1,1.6408136E0,-1.1853284E-1,-5.3963672E-2,-2.963249E-1,5.336271E-1,-1.6840288E-1,-2.9701093E-1,-1.1029265E-1,-3.3942625E-2,7.6314414E-1,-2.9518205E-1,9.148449E-2,-2.9667312E-2,-2.4345578E-1,5.914923E-1,5.503538E-2,-2.4989772E-1,1.5225235E-1,5.5591637E-1,-8.900124E-2,-4.6905506E-2,-3.3646318E-1,-2.4162127E-1,2.51206E-1,-2.571987E-1,1.3848723E-1,-1.7212442E-1,2.602887E-1,1.473693E-2,
-3.308379E-1,2.255681E-1,-2.3954868E-1,3.2597113E-2,-3.012841E-1,-1.9865322E-1,1.9207884E-1,2.1157698E-1,-2.3027466E-1,-2.753462E-1,-3.668854E-2,3.2521907E-2,6.759319E-1,-6.6128045E-2,5.7157975E-1,-1.595073E-1,1.7338288E-1,3.7587056E-1,-3.655155E-2,5.0538576E-1,-9.838783E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":1,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,41,43,45,47,49,51,53,55,57,59,61,-1,63,65,67,69,71,73,75,77,79,81,-1,83,85,87,89,-1,91,93,95,97,99,101,103,-1,-1,105,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.159254E2,6.3086807E1,4.51425E1,1.9190128E1,1.1343124E1,4.267693E1,1.4175743E1,1.7632843E1,2.319367E1,1.267717E1,5.443157E0,7.364027E0,1.582045E1,1.0458664E1,1.0471215E1,5.1104374E0,1.846104E1,1.4942654E1,7.9661365E0,2.356903E0,1.722233E1,8.72715E0,1.0160326E1,4.993076E0,2.544529E0,1.8381302E1,2.3067482E1,1.242372E0,6.122528E0,2.770486E-1,8.962405E0,0E0,3.97538E0,1.6325214E1,2.6182854E0,2.5170422E-1,3.0431137E0,3.8958197E0,4.7617455E0,7.2702026E-1,3.2035902E0,8.602931E0,0E0,1.1666445E1,3.0785637E0,5.359747E0,2.3724008E0,0E0,2.229233E0,2.9647887E0,2.2907333E0,6.040142E0,9.749835E0,1.8696463E1,1.3661995E1,0E0,0E0,5.517502E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,41,41,43,43,44,44,45,45,46,46,48,48,49,49,50,50,51,51,52,52,53,53,54,54,57,57],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,-1,64,66,68,70,72,74,76,78,80,82,-1,84,86,88,90,-1,92,94,96,98,100,102,104,-1,-1,106,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.3264844E-1,-7.572781E-1,1.7677041E0,9.9985856E-1,-4.1237012E-1,-5.411535E-1,-2.0079018E-1,2.603036E-1,9.406206E-3,1.6094304E0,7.9795814E-1,-1.400339E0,-8.6145854E-1,-1.1001399E0,4.6192405E-1,-1.3647237E0,-2.523423E-1,-3.3469358E-1,-1.6082309E-1,3.382379E0,2.0928838E0,-1.4983252E-1,-1.0747923E0,-9.494192E-1,-6.86076E-1,-1.3526601E0,1.9577834E-1,-4.7465166E-1,1.2481655E0,-2.1057709E-1,-1.2340349E0,2.858268E-1,-4.7465166E-1,-1.4228727E-1,1.3356128E0,8.548993E-1,1.458546E0,4.5103574E-1,-9.395118E-1,-1.8533121E0,-6.639171E-1,-2.432298E-1,-3.3057302E-1,1.1911016E0,-2.0551927E0,1.4398967E0,-7.992597E-1,4.8174965E-1,1.9374871E0,2.3453663E-1,1.0667855E0,9.406206E-3,2.486925E0,-8.708964E-1,1.0484438E0,9.682874E-2,-2.2891994E-1,2.5453355E0,-1.1853284E-1,-5.3963672E-2,-2.963249E-1,5.336271E-1,-1.6840288E-1,-2.9701093E-1,-1.1029265E-1,-3.3942625E-2,7.6314414E-1,-2.9518205E-1,9.148449E-2,-2.9667312E-2,-2.4345578E-1,5.914923E-1,5.503538E-2,-2.4989772E-1,1.5225235E-1,5.5591637E-1,-8.900124E-2,-4.6905506E-2,-3.3646318E-1,-2.4162127E-1,2.51206E-1,-2.571987E-1,1.3848723E-1,-1.7212442E-1,2.60288
7E-1,1.473693E-2,-3.308379E-1,2.255681E-1,-2.3954868E-1,3.2597113E-2,-3.012841E-1,-1.9865322E-1,1.9207884E-1,2.1157698E-1,-2.3027466E-1,-2.753462E-1,-3.668854E-2,3.2521907E-2,6.759319E-1,-6.6128045E-2,5.7157975E-1,-1.595073E-1,1.7338288E-1,3.7587056E-1,-3.655155E-2,5.0538576E-1,-9.838783E-2],"split_indices":[10,1,10,3,10,2,4,9,2,3,9,0,8,5,9,1,7,2,10,0,3,1,6,9,4,9,5,0,2,1,6,0,0,5,9,7,7,0,1,6,10,0,0,8,2,8,8,0,8,6,9,2,2,3,6,0,0,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[8.111823E2,4.2158902E2,3.8959332E2,9.550404E1,3.2608496E2,3.1931393E2,7.027938E1,6.3996307E1,3.1507732E1,2.2230562E2,1.03779366E2,3.8214405E1,2.8109952E2,6.030215E1,9.977229E0,4.9424225E1,1.45720825E1,1.6790962E1,1.4716771E1,1.768776E2,4.542802E1,8.03842E1,2.3395163E1,5.093366E0,3.312104E1,5.365064E1,2.2744888E2,2.5743544E0,5.7727795E1,4.331667E0,5.645562E0,1.3790917E0,4.8045135E1,8.703023E0,5.8690586E0,2.4143589E0,1.4376603E1,1.1818191E1,2.8985808E0,1.7396709E2,2.910508E0,2.3209978E1,2.2218042E1,4.2021618E1,3.836258E1,1.2336705E1,1.1058457E1,1.477959E0,3.615407E0,3.5585089E0,2.956253E1,5.6948857E0,4.7955753E1,1.4661855E2,8.083034E1,1.0724864E0,1.501868E0,5.6570065E1,1.1577314E0,1.0467695E0,3.2848973E0,3.735755E0,1.9098073E0,1.5723965E1,3.2321167E1,3.586604E0,5.1164193E0,4.1578307E0,1.7112279E0,1.0197428E0,1.3946161E0,1.3123068E1,1.2535346E0,9.825254E0,1.9929365E0,1.5837405E0,1.3148402E0,1.1360301E0,1.7283105E2,1.8625735E0,1.0479345E0,5.7704234E0,1.7439556E1,3.6325844E1,5.6957746E0,2.3865407E0,3.597604E1,1.0433102E1,1.9036036E0,2.1477048E0,8.910753E0,2.4425275E0,1.1728795E0,2.1202998E0,1.438209E0,2.4965935E1,4.5965953E0,1.9632475E0,3.7316382E0,4.6691963E1,1.2637898E0,1.638156E1,1.3023698E2,7.300556E1,7.8247724E0,5.5479954E1,1.0901097E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"107","size_leaf_vector":"1"}},{"base_weights":[-5.3076748E-2,-5.539598E-1,3.9216515E-1,-5.826247E-2,-7.9770714E-1,7.817243E-2,6.1826074E-1,-5.8615583E-1,9.064995E-2,-9.5063144E-1,-4.969994E-1,2.5427863E-1,-7.070401E-1,-4.1393405E-1,6.650197E-1,-9.690233E-1,-5.5687014E-2,6.545268E-1,-1.1998155E-1,-1.0411633E0,-5.429089E-1,-9.2234254E-1,-3.3805996E-1,1.0899042E-1,7.499888E-1,2.597179E-1,-8.2866096E-1,3.5923013E-1,-3.445006E-1,5.714199E-1,1.1521394E0,-1.0228277E0,-4.1932657E-2,7.5038284E-1,-6.708904E-1,8.9350563E-1,-2.7243945E-1,-2.0011891E-1,1.3453225E0,-1.0609045E0,-1.699393E-2,-2.0559005E-1,-1.0325533E0,-4.9587628E-1,-1.0244173E0,1.9986936E-1,-5.3413206E-1,4.5682955E-1,-2.468487E-1,5.3148204E-1,7.1140677E-1,-9.7628653E-1,8.009851E-2,1.1475499E0,-2.1552159E-1,1.4898694E-2,7.283476E-1,1.4006371E0,4.1422546E-1,-3.2521185E-1,-8.0258794E-2,-1.7542267E-1,5.4797345E-1,-2.6006606E-1,5.7670277E-2,-9.091932E-2,3.9422545E-1,-1.270529E-2,-2.8802687E-1,5.2443415E-1,6.17967E-2,-4.0244617E-2,-3.2035822E-1,-2.0868261E-1,2.2570021E-1,-2.644378E-1,2.3678221E-1,-3.3047312E-1,-7.819475E-2,-2.5985435E-1,2.9074827E-2,-4.426788E-2,-3.232652E-1,-1.1194259E-1,2.0299998E-1,-1.7484668E-1,2.5236508E-1,3.3944184E-1,5.1552825E-2,-1.5586475E-1,2.7307585E-2,-1.6736247E-1,2.8142574E-1,-3.0190843E-1,-6.3679434E-2,-1.46428E-1,3.4808132E-1,2.7719159E-2,4.79337E-1,2.8953087E-1,-6.2130462E-2,2.3056585E-1,-3.3311132E-1,5.2361006E-1,2.466228E-1,3.5926178E-1,-1.3685239E-2],"ca
tegories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":2,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,41,43,45,47,49,-1,51,53,-1,55,57,59,-1,61,63,65,-1,67,69,71,73,75,77,79,81,83,85,87,89,91,-1,93,95,97,-1,99,101,103,105,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.7299683E2,4.4055435E1,2.911111E1,9.596224E0,1.0776001E1,2.4115055E1,1.179348E1,5.398082E0,1.1427483E1,5.3795013E0,5.4044914E0,1.0153729E1,6.8333445E0,6.635028E0,9.768593E0,5.499506E-1,6.60989E0,1.0573678E1,8.537464E0,2.6245117E0,4.966113E0,4.6811104E-1,6.765599E0,1.3861151E1,1.0091625E1,0E0,4.1283073E0,6.006654E0,0E0,1.6915615E1,5.9984016E0,3.769331E-1,0E0,9.404981E0,1.4374108E0,1.2062538E1,0E0,8.068338E0,1.3749056E0,6.8330383E-1,2.344301E0,1.401758E1,3.8101006E-1,1.4599916E0,6.40213E-1,5.0588927E0,3.6069336E0,1.0672368E1,5.1661053E0,1.3601406E1,0E0,3.8487434E-1,3.7228367E0,1.7089605E0,0E0,9.407962E0,1.2028198E1,3.8342018E0,3.821558E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,26,26,27,27,29,29,30,30,31,31,33,33,34,34,35,35,37,37,38,38,39,39,40,40,41,41,42,42,43,43,44,44,45,45,46,46,47,47,48,48,49,49,51,51,52,52,53,53,55,55,56,56,57,57,58,58],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,-1,52,54,-1,56,58,60,-1,62,64,66,-1,68,70,72,74,76,78,80,82,84,86,88,90,92,-1,94,96,98,-1,100,102,104,106,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.3264844E-1,-6.35789E-1,8.45365E-1,-5.9036255E-1,-4.1237012E-1,3.3612394E-1,-1.5456452E0,-3.8442045E-1,-1.0831622E0,1.6094304E0,-3.445578E-1,6.6354454E-1,-6.575297E-1,2.1961388E-1,1.201199E0,9.981511E-1,-4.5770618E-1,1.3226638E0,1.9404742E0,3.382379E0,1.4247327E-1,-5.4334486E-1,-7.447781E-1,-4.5770618E-1,1.1254588E0,2.597179E-1,-5.1127385E-2,-1.6700429E0,-3.445006E-1,-7.0572996E-1,-5.135612E-1,1.1793455E0,-4.1932657E-2,-5.491631E-1,5.291309E-1,-2.432298E-1,-2.7243945E-1,9.015902E-1,-4.9621913E-1,-1.8533121E0,5.963377E-1,-1.6608919E-1,-8.0366546E-1,-1.4228727E-1,-1.6586821E0,-7.697409E-2,2.4350774E0,-8.4839493E-1,1.9577834E-1,-6.9653356E-1,7.1140677E-1,2.1862822E0,3.2031852E-1,-5.5560064E-1,-2.1552159E-1,-1.764844E0,2.5557446E0,3.53506E-1,-6.855323E-1,-3.2521185E-1,-8.0258794E-2,-1.7542267E-1,5.4797345E-1,-2.6006606E-1,5.7670277E-2,-9.091932E-2,3.9422545E-1,-1.270529E-2,-2.8802687E-1,5.2443415E-1,6.17967E-2,-4.0244617E-2,-3.2035822E-1,-2.0868261E-1,2.2570021E-1,-2.644378E-1,2.3678221E-1,-3.3047312E-1,-7.819475E-2,-2.5985435E-1,2.9074827E-2,-4.426788E-2,-3.232652E-1,-1.1194259E-1,2.0299998E-1,-1.7484668E-1,2.5236508E-1,3.3944184E-1,5.1552825E-2,-1.5586475E-1,2.7307585E-2,-1.6736247E-1,2.8142574E-1,-3.0190843E-1,-6.3679434E-2,-1.46428E-1,3.4808132E-1,2.7719159E-2,4.79337E-1,2.8953087E-1,-6.2130462E-2,2.3056585E-1,-3.3311132E-1,5.2361006E-1,2.4
66228E-1,3.5926178E-1,-1.3685239E-2],"split_indices":[10,1,10,0,10,1,8,10,10,3,9,9,4,0,9,4,4,5,9,0,0,4,3,4,5,0,3,8,0,5,3,2,0,7,9,0,0,7,10,6,9,0,10,5,6,10,8,1,5,1,0,8,8,3,0,6,2,2,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[7.7374725E2,3.6400687E2,4.097404E2,1.2039809E2,2.436088E2,1.7209178E2,2.3764859E2,2.5844234E1,9.4553856E1,1.6005289E2,8.35559E1,1.4111818E2,3.0973597E1,9.958739E0,2.2768985E2,1.45302515E1,1.1313982E1,2.5141272E1,6.941258E1,1.2970027E2,3.0352615E1,2.1422726E1,6.213318E1,1.1007581E2,3.104237E1,1.7840855E0,2.918951E1,5.100198E0,4.8585415E0,1.9281386E2,3.4876003E1,1.3425705E1,1.1045469E0,4.8021874E0,6.5117946E0,2.217781E1,2.9634597E0,6.663191E1,2.7806659E0,1.2721233E2,2.4879346E0,1.8812473E1,1.1540143E1,5.268599E0,1.6154127E1,1.6592318E1,4.554086E1,5.5520107E1,5.4555706E1,2.852583E1,2.5165417E0,2.504647E1,4.1430397E0,2.9080222E0,2.1921754E0,4.2651493E1,1.5016235E2,2.5418158E1,9.457845E0,1.2016596E1,1.4091091E0,2.3470728E0,2.4551144E0,5.3031974E0,1.2085971E0,5.840549E0,1.6337263E1,5.6051495E1,1.05804205E1,1.65961E0,1.1210557E0,1.0988638E0,1.2611347E2,1.3728145E0,1.11512E0,1.1270056E1,7.5424166E0,1.0200888E1,1.3392551E0,2.9589915E0,2.3096077E0,1.1387373E0,1.501539E1,7.634559E0,8.957758E0,4.454028E1,1.0005805E0,1.5606029E1,3.9914078E1,2.9891981E1,2.4663727E1,7.652232E0,2.0873596E1,2.379271E1,1.2537626E0,3.0741243E0,1.0689155E0,1.2302073E0,1.6778147E0,7.4437003E0,3.5207794E1,1.4751875E2,2.6436045E0,1.4664017E1,1.0754141E1,2.9050453E0,6.552799E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"107","size_leaf_vector":"1"}},{"base_weights":[-5.339817E-2,-4.716882E-1,2.684219E-1,9.6792415E-2,-6.4967865E-1,3.1085286E-1,-5.740462E-1,-1.932835E-1,6.7588407E-1,-8.251345E-1,-3.8677022E-1,2.4353373E-1,7.7755326E-1,-9.704921E-1,2.5453423E-3,-4.1466895E-1,3.5902104E-1,1.1273052E0,1.4754839E-1,-9.841336E-1,-3.91839E-1,-2.762579E-1,-1.000651E0,1.4634331E0,2.2279586E-1,9.011879E-1,-1.8820313E-1,-3.219204E-1,-7.749632E-2,-4.2983922E-1,3.0515984E-1,-5.908327E-1,1.2866847E-1,8.1590337E-1,-5.065103E-1,-2.1281171E-1,4.164572E-1,1.075344E0,-4.317877E-1,-3.5900384E-2,-1.0163829E0,8.897256E-2,-3.0428028E-1,2.8347338E-3,-6.1266255E-1,-3.175833E-1,-9.4967894E-2,5.747124E-2,5.676552E-1,-3.985694E-1,2.662364E-1,-1.0061878E-1,9.5867276E-1,-6.2868947E-1,3.0914736E-1,1.2376711E-1,-2.9245004E-1,2.042404E-1,-2.0106605E-1,3.115494E-1,-1.5018778E-1,-1.7412143E-2,3.7365097E-1,1.5046388E-1,-2.821996E-1,-8.739498E-2,4.500091E-1,-2.9679692E-1,1.3326603E-1,-1.5125123E-1,1.8510363E-1,-3.0748644E-1,-2.6028764E-2,-1.2513706E-1,1.7725366E-1,-1.85462E-1,1.0250891E-1,-2.852006E-1,-1.9586204E-2,-2.0057002E-1,1.168424E-1,7.284866E-2,4.0631565E-1,2.2963123E-1,-2.7945128E-1,1.7944093E-1,3.5988086E-1,-2.6995647E-1,7.87739E-3,2.1064876E-1,-6.611949E-2,-2.0153771E-1,2.1301752E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":3,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,41,43,45,47,49,51,53,-1,-1,55,-1,57,59,61,63,-1,-1,65,67,69,71,73,-1,75,77,-1,-1,-1
,-1,79,81,83,85,87,89,91,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[9.905034E1,3.2432068E1,1.504784E1,1.3090164E1,1.0924973E1,1.2273113E1,4.6420717E0,6.5289054E0,6.0275793E0,9.625267E0,6.4700823E0,8.48073E0,6.085737E0,6.016054E-1,4.502837E0,3.685711E0,6.5294523E0,7.6229687E0,7.514383E0,3.1939774E0,1.2261361E1,8.043771E0,2.5130367E-1,2.7127523E0,9.377491E0,2.6359978E0,1.6075027E0,0E0,0E0,2.2932463E0,0E0,3.3493252E0,6.276735E0,4.016732E0,3.0804343E0,0E0,0E0,3.5295644E0,4.731544E0,1.6903493E0,7.2488403E-1,6.3078713E0,0E0,1.0272942E1,7.124789E0,0E0,0E0,0E0,0E0,5.1210275E0,7.8634663E0,3.0862906E0,2.9133797E0,6.897019E-1,9.248061E-1,2.5570807E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,29,29,31,31,32,32,33,33,34,34,37,37,38,38,39,39,40,40,41,41,43,43,44,44,49,49,50,50,51,51,52,52,53,53,54,54,55,55],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,-1,-1,56,-1,58,60,62,64,-1,-1,66,68,70,72,74,-1,76,78,-1,-1,-1,-1,80,82,84,86,88,90,92,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.3264844E-1,-8.1802267E-1,1.551015E0,9.9985856E-1,-4.9621913E-1,1.9354022E0,6.6354454E-1,1.2588996E-1,-5.9413753E-2,1.6094304E0,9.7766924E-1,-1.2736068E0,6.2878585E-1,-1.1411514E-1,1.3065345E0,-7.697409E-2,-2.523423E-1,-3.3469358E-1,8.015383E-1,-7.572781E-1,2.0928838E0,-2.834341E-2,1.0116473E0,-6.004373E-1,-8.9191616E-1,-1.3590488E0,-1.1507303E-1,-3.219204E-1,-7.749632E-2,1.2684059E0,3.0515984E-1,-1.3647237E0,-7.4779874E-1,-1.4228727E-1,-2.8642884E-1,-2.1281171E-1,4.164572E-1,-9.494192E-1,1.6019539E0,4.4686025E-1,5.0319877E0,1.8406473E0,-3.0428028E-1,-3.445578E-1,7.3075134E-1,-3.175833E-1,-9.4967894E-2,5.747124E-2,5.676552E-1,4.447161E-1,1.8861065E0,-1.7131008E0,-6.396795E-1,-1.6673584E-1,2.1463482E-1,-6.396795E-1,-2.9245004E-1,2.042404E-1,-2.0106605E-1,3.115494E-1,-1.5018778E-1,-1.7412143E-2,3.7365097E-1,1.5046388E-1,-2.821996E-1,-8.739498E-2,4.500091E-1,-2.9679692E-1,1.3326603E-1,-1.5125123E-1,1.8510363E-1,-3.0748644E-1,-2.6028764E-2,-1.2513706E-1,1.7725366E-1,-1.85462E-1,1.0250891E-1,-2.852006E-1,-1.9586204E-2,-2.0057002E-1,1.168424E-1,7.284866E-2,4.0631565E-1,2.2963123E-1,-2.7945128E-1,1.7944093E-1,3.5988086E-1,-2.6995647E-1,7.87739E-3,2.1064876E-1,-6.611949E-2,-2.0153771E-1,2.1301752E-1],"split_indices":[10,1,1,3,10,10,9,9,2,3,6,1,2,5,10,10,7,2,7,1,3,1,7,4,3,8,8,0,0,9,0,1,9,5,4,0,0,9,7,6,9,3,0,9,9,0,0,0,0,8,5,7,3,3,1,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[7.338689E2,3.1890372E2,4.1496518E2,7.614899E1,2.4275473E2,3.9566833E2,1.9296843E1,5.1178913E1,2.4970074E1,1.443093E2,9.844544E1,3.471199E2,4.854843E1,1.1029889E1,8.266954E0,3.6712353E1,1.4466558E1,1.2853813E1,1.2116261E1,1.04640045E2,3.9669247E1,8.4655556E1,1.3789883E1,4.639415E0,3.424805E2,4.3097954E1,5.4504757E0,9.198282E0,1.8316069E0,6.2008805E0,2.066074E0,2.7657633E1,9.05472E0,9.506018E0,4.960541E0,1.384886E0,1.1468927E1,4.3283405E0,7.
7879205E0,3.5112581E0,1.01128784E2,2.2864223E1,1.6805025E1,4.681108E1,3.7844475E1,1.2208079E1,1.5818036E0,1.5344316E0,3.1049833E0,2.1844587E1,3.206359E2,2.2980874E0,4.079987E1,2.7504332E0,2.7000422E0,3.4595568E0,2.7413235E0,1.1800364E0,2.6477596E1,3.4362931E0,5.618427E0,3.4178698E0,6.0881476E0,1.4465647E0,3.513976E0,1.1006E0,3.2277405E0,4.6799874E0,3.107933E0,2.1774201E0,1.3338381E0,1.0010321E2,1.0255804E0,1.1468293E1,1.139593E1,1.6234072E1,3.057701E1,2.2941341E1,1.4903134E1,1.6383078E1,5.4615088E0,3.1508124E2,5.5546575E0,1.1637969E0,1.1342905E0,1.7738285E1,2.3061583E1,1.6806612E0,1.0697721E0,1.3627056E0,1.3373365E0,1.4059455E0,2.0536113E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"93","size_leaf_vector":"1"}},{"base_weights":[-4.948895E-2,-4.90968E-1,1.4380114E-1,-1.3348636E-1,-7.8388673E-1,3.2088053E-1,-4.4442106E-2,-6.712703E-1,2.0029178E-2,-8.592403E-1,-4.2869017E-1,-6.899598E-1,3.759036E-1,-1.9875963E-1,4.4638118E-1,-9.761536E-1,7.3906615E-2,7.547551E-1,-1.23968355E-1,-5.193606E-1,-1.0016959E0,-9.6045774E-1,1.9705239E-1,-9.9035525E-1,3.230531E-1,2.1623969E-1,6.064704E-1,-7.093905E-1,-1.1976289E-1,9.781375E-1,2.4996204E-3,-3.1037217E-1,-3.0181147E-2,7.112918E-1,-2.4519612E-1,-1.6029565E-1,1.6494888E0,3.7400573E-1,-4.877387E-1,-8.536979E-1,1.5229672E-3,-3.0801553E-1,-5.612119E-1,-3.1044656E-1,-7.060313E-2,1.0540961E0,-6.158127E-1,-2.1260327E-1,-3.5011283E-1,3.1531593E-1,-2.084146E-1,9.059916E-2,6.7443246E-1,6.455152E-1,-2.887119E-1,-1.6264828E-1,-9.651867E-1,8.360368E-1,-1.5771054E-1,1.3271277E0,1.6467553E-1,-4.4139087E-1,3.9762577E-1,-2.0565817E-1,4.6806753E-1,-2.7049735E-1,2.258509E-1,-1.9711901E-1,7.046665E-1,-2.0162706E-1,1.8593118E-1,-2.7696177E-1,-7.852556E-2,-2.5789501E-2,-2.6844788E-1,-2.1031325E-1,1.343525E-1,1.24100514E-1,-2.581018E-1,6.202164E-1,-1.433865E-1,-5.064122E-2,-2.509415E-1,-1.8457349E-1,9.034931E-2,-6.9258384E-2,8.735289E-2,5.391435E-2,3.047715E-1,1.2569694E-1,2.8428563E-1,1.9614209E-1,-2.010973E-1,-3.1963652E-1,-9.493751E-2,4.0412176E-1,-2.5024961E-3,-3.369434E-2,-2.770532E-1,4.423162E-1,-2.6510044E-3,-1.0759152E-1,3.0265716E-1,-2.1089844E-1,9.366438E-2,-1.5625994E-1,2.2253084E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":4,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,41,43,45,47,49,51,53,55,57,59,61,-1,-1,63,-1,65,67,69,71,73,75,-1,77,-1,-1,79,81,83,-1,-1,-1,85,87,89,-1,91,93,95,97,99,101,103,105,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[5.9683586E1,2.2193993E1,1.6235865E1,8.030735E0,2.7588882E0,1.4212988E1,1.8021698E1,4.9781666E0,8.188923E0,4.0985413E0,7.6576552E0,4.3683023E0,8.651482E0,7.2090087E0,1.3437842E1,6.833563E-1,4.6710353E0,1.0645819E1,1.1927459E1,5.269555E0,1.5283203E-1,4.199009E-1,8.410559E0,1.1957312E0,3.543348E0,8.143095E0,6.566555E0,3.2027035E0,5.8552313E0,7.1353054E0,5.794677E0,0E0,0E0,6.681021E0,0E0,5.5395393E0,1.20287285E1,7.5986176E0,3.488181E0,5.049076E-1,4.3353176E0,0E0,2.0358863E0,0E0,0E0,9.979479E0,4.8504782E-1,8.4059566E-1,0E0,0E0,0E0,7.3532186E0,4.9722004E0,6.124737E0,0E0,4.1104894E0,7.141943E-1,2.6798663E0,5.2017736E0,3.5513554E0,4.301754E0,3.287416E0,5.910386E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0
E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,33,33,35,35,36,36,37,37,38,38,39,39,40,40,42,42,45,45,46,46,47,47,51,51,52,52,53,53,55,55,56,56,57,57,58,58,59,59,60,60,61,61,62,62],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,-1,-1,64,-1,66,68,70,72,74,76,-1,78,-1,-1,80,82,84,-1,-1,-1,86,88,90,-1,92,94,96,98,100,102,104,106,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.6082309E-1,-5.1429987E-1,-4.5770618E-1,-4.7465166E-1,1.4202529E0,-1.1001399E0,1.201199E0,6.313125E-1,-3.5239425E-1,-1.0747923E0,-7.573051E-2,5.397121E-2,1.9592093E-1,-6.0997343E-1,-2.834341E-2,9.981511E-1,-1.4562023E-1,-3.1497508E-1,-1.0831622E0,-4.9621913E-1,1.377698E0,1.7687072E0,-2.1057709E-1,-6.0997343E-1,7.4762195E-1,1.7677041E0,1.8675451E0,-1.5171331E0,-1.6897553E0,6.9351125E-1,6.4971733E-1,-3.1037217E-1,-3.0181147E-2,-7.447781E-1,-2.4519612E-1,-8.523691E-3,-1.0552125E0,1.044998E0,-1.1411514E-1,-2.7132162E-1,7.3075134E-1,-3.0801553E-1,1.5020956E0,-3.1044656E-1,-7.060313E-2,1.3184733E0,-1.6608919E-1,-5.461783E-1,-3.5011283E-1,3.1531593E-1,-2.084146E-1,-6.396795E-1,-1.1411514E-1,-6.289329E-1,-2.887119E-1,1.551015E0,1.8752882E0,-5.461783E-1,1.1469966E0,-4.294823E-1,3.9868048E-1,2.1420946E0,3.3612394E-1,-2.0565817E-1,4.6806753E-1,-2.7049735E-1,2.258509E-1,-1.9711901E-1,7.046665E-1,-2.0162706E-1,1.8593118E-1,-2.7696177E-1,-7.852556E-2,-2.5789501E-2,-2.6844788E-1,-2.1031325E-1,1.343525E-1,1.24100514E-1,-2.581018E-1,6.202164E-1,-1.433865E-1,-5.064122E-2,-2.509415E-1,-1.8457349E-1,9.034931E-2,-6.9258384E-2,8.735289E-2,5.391435E-2,3.047715E-1,1.2569694E-1,2.8428563E-1,1.9614209E-1,-2.010973E-1,-3.1963652E-1,-9.493751E-2,4.0412176E-1,-2.5024961E-3,-3.369434E-2,-2.770532E-1,4.423162E-1,-2.6510044E-3,-1.0759152E-1,3.0265716E-1,-2.1089844E-1,9.366438E-2,-1.5625994E-1,2.2253084E-1],"split_indices":[10,1,4,0,3,5,9,8,7,6,9,3,8,2,1,4,7,4,10,10,8,7,1,2,3,10,2,6,7,8,10,0,0,3,0,9,10,7,5,1,9,0,8,0,0,7,0,9,0,0,0,3,5,0,0,1,8,9,3,3,4,9,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[6.9758813E2,2.1194981E2,4.8563834E2,9.611077E1,1.1583904E2,2.4987637E2,2.3576196E2,2.0594173E1,7.5516594E1,9.4394554E1,2.1444489E1,1.2303669E1,2.3757271E2,1.7982047E2,5.594149E1,1.4394724E1,6.199449E0,1.167995E1,6.3836647E1,2.9251112E1,6.514344E1,1.1303585E1,1.0140904E1,9.506894E0,2.7967756E0,1.4151419E2,9.605851E1,2.302273E1,1.5679774E2,2.490385E1,3.1037638E1,1.33240595E1,1.0706644E0,3.7321773E0,2.4672718E0,6.1802297E0,5.4997206E0,2.6935646E1,3.6901E1,1.742662E1,1.1824491E1,6.038242E1,4.7610207E0,9.864785E0,1.4388E0,4.791852E0,5.349052E0,2.1678061E0,7.339088E0,1.6114309E0,1.1853447E0,1.1200042E2,2.951378E1,9.430015E1,1.7583643E0,7.859526E0,1.5163204E1,5.184433E0,1.5161331E2,1.6985361E1,7.9184904E0,1.4561711E1,1.6475927E1,1.4835899E0,2.2485874E0,3.417866E0,2.7623634E0,1.2937125E0,4.206008E0,4.794254E0,2.2141392E1,1.1553217E1,2.5347782E1,1.0432202E0,1.6
3834E1,4.371858E0,7.452633E0,1.0279459E0,3.7330747E0,2.6739128E0,2.1179395E0,2.351138E0,2.9979138E0,1.104606E0,1.0632E0,4.2974762E1,6.902566E1,1.267574E1,1.683804E1,5.525522E1,3.904493E1,2.9030068E0,4.956519E0,1.2576923E1,2.5862808E0,2.8588526E0,2.3255804E0,1.4421123E2,7.402074E0,1.5205669E1,1.7796906E0,5.2432985E0,2.6751919E0,1.0859234E1,3.7024772E0,4.3507104E0,1.21252165E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"107","size_leaf_vector":"1"}},{"base_weights":[-4.8418824E-2,-4.2839313E-1,9.985452E-2,-5.5036858E-2,-6.7085147E-1,-4.438511E-2,2.4134463E-1,-7.3169273E-1,7.206271E-2,-2.863142E-1,-7.6431346E-1,3.9615374E-2,-5.4198E-1,1.3079173E-2,3.653227E-1,-9.440471E-1,1.3388392E-1,-6.969165E-1,1.6123737E-1,1.6291708E-1,-7.584856E-1,-7.922046E-1,2.0470013E-1,-6.486028E-2,3.668683E-1,4.77504E-1,-6.258608E-1,-1.9737509E-1,4.257478E-1,6.00586E-2,5.0729066E-1,-5.7346042E-2,-3.0537271E-1,-2.66245E-1,-2.4949716E-1,2.4797547E-1,-6.63696E-1,-6.0172796E-1,1.0203044E0,-2.0949377E-2,-8.7947565E-1,-8.94471E-1,-4.7396109E-1,6.423718E-3,-7.9276884E-1,8.9682144E-1,1.5244192E-1,-4.3307565E-2,2.6210678E-1,1.4073656E-1,-7.041736E-1,4.3832082E-1,-3.9408836E-1,6.848592E-1,-4.8993024E-1,4.931923E-1,-1.4940107E-1,-1.9963865E-1,6.2150323E-1,1.1246227E-1,-2.0326322E-1,1.01069756E-1,-1.8551022E-1,-2.725192E-1,5.2233413E-2,-2.8791365E-1,1.2699603E-1,-1.9968463E-1,4.6898696E-1,-9.80969E-2,-2.8791338E-1,6.5027006E-2,-2.7549297E-1,-2.1979378E-1,-4.2631175E-2,3.7152833E-1,-1.0871802E-2,-3.005677E-1,-6.0709603E-2,3.2050753E-1,-1.1335481E-1,-5.6161642E-2,2.6137635E-1,-1.9631885E-1,2.4153583E-1,-9.338133E-2,-2.810601E-1,4.277818E-1,4.85674E-2,-1.4054744E-1,2.1380441E-1,2.4535842E-1,-3.0029976E-1,-2.8682944E-1,-2.0690445E-2,7.596764E-3,3.3498403E-1,-1.1110182E-1,8.787759E-2,2.3041366E-1,-1.3832559E-1,2.1202673E-1,-5.8121692E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":5,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,-1,33,35,37,39,41,-1,43,45,47,49,51,53,55,57,-1,-1,-1,59,61,63,65,67,-1,69,71,73,75,77,79,81,-1,-1,83,85,87,89,91,93,95,97,99,101,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.7487183E1,1.6867317E1,9.783827E0,6.469575E0,3.8468971E0,9.968535E0,6.840887E0,3.395124E0,4.4821405E0,5.1545725E0,4.188408E0,7.0169625E0,3.2439547E0,7.568311E0,6.761423E0,3.8340473E-1,0E0,3.5919595E-1,4.308525E0,9.122039E0,9.168272E-1,2.4778595E0,0E0,8.094016E0,5.5050063E0,9.1092473E-1,2.0354614E0,7.394946E0,7.3163304E0,4.689997E0,8.75013E0,0E0,0E0,0E0,1.1693976E0,4.3015428E0,1.3435583E0,3.1543071E0,6.9805713E0,0E0,1.2504196E-1,1.9566879E0,1.9399552E0,7.5599837E0,1.5150204E0,3.4859552E0,9.086689E0,0E0,0E0,2.5464509E0,2.399148E0,3.6245272E0,4.009262E0,6.054261E0,1.321926E0,4.8772907E0,3.535244E0,4.3051615E0,6.546364E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,17,17,18,18,19,19,20,20,21,21,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,34,34,35,35,36,36,37,37,38,38,40,40,41,41,42,42,43,43,44,44,45,45,46,46,49,
49,50,50,51,51,52,52,53,53,54,54,55,55,56,56,57,57,58,58],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,-1,34,36,38,40,42,-1,44,46,48,50,52,54,56,58,-1,-1,-1,60,62,64,66,68,-1,70,72,74,76,78,80,82,-1,-1,84,86,88,90,92,94,96,98,100,102,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.6082309E-1,-6.35789E-1,8.45365E-1,-3.3469358E-1,-8.6145854E-1,3.968685E-1,-3.9281076E-1,-3.8442045E-1,-6.493857E-1,-2.1057709E-1,3.6878514E0,6.6354454E-1,-6.575297E-1,7.529394E-2,-6.712091E-1,-8.498767E-1,1.3388392E-1,-1.7224395E-1,1.773418E0,-1.1102537E0,-1.2871158E0,-4.1237012E-1,2.0470013E-1,2.4124733E-1,9.406206E-3,-1.4228727E-1,-9.236573E-1,-6.5014505E-1,-7.826177E-1,-1.6897553E0,-9.858561E-1,-5.7346042E-2,-3.0537271E-1,-2.66245E-1,-3.0166942E-1,8.367388E-1,1.5615591E0,-8.3161515E-1,2.521226E-1,-2.0949377E-2,-1.6608919E-1,-6.575297E-1,5.069149E-1,-1.2432345E0,3.2751042E-1,5.013824E-1,6.414551E-1,-4.3307565E-2,2.6210678E-1,-2.5788262E-1,-9.5407325E-1,-1.1521177E0,-6.225345E-1,1.2607673E0,2.641684E-1,-6.133851E-1,9.995786E-1,-1.3526601E0,7.2995836E-1,1.1246227E-1,-2.0326322E-1,1.01069756E-1,-1.8551022E-1,-2.725192E-1,5.2233413E-2,-2.8791365E-1,1.2699603E-1,-1.9968463E-1,4.6898696E-1,-9.80969E-2,-2.8791338E-1,6.5027006E-2,-2.7549297E-1,-2.1979378E-1,-4.2631175E-2,3.7152833E-1,-1.0871802E-2,-3.005677E-1,-6.0709603E-2,3.2050753E-1,-1.1335481E-1,-5.6161642E-2,2.6137635E-1,-1.9631885E-1,2.4153583E-1,-9.338133E-2,-2.810601E-1,4.277818E-1,4.85674E-2,-1.4054744E-1,2.1380441E-1,2.4535842E-1,-3.0029976E-1,-2.8682944E-1,-2.0690445E-2,7.596764E-3,3.3498403E-1,-1.1110182E-1,8.787759E-2,2.3041366E-1,-1.3832559E-1,2.1202673E-1,-5.8121692E-2],"split_indices":[10,1,10,2,8,1,1,10,5,1,9,9,4,6,3,3,0,4,5,8,6,10,0,7,2,5,8,6,7,7,8,0,0,0,8,0,6,10,5,0,0,4,8,1,9,7,7,0,0,4,2,1,7,6,3,9,9,9,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[6.636142E2,1.8573897E2,4.778752E2,7.361219E1,1.12126785E2,2.3697821E2,2.40897E2,1.0888242E1,6.2723946E1,2.2718369E1,8.940842E1,2.0353862E2,3.3439575E1,8.517689E1,1.5572012E2,9.393456E0,1.4947853E0,5.8097324E0,5.691421E1,1.1977628E1,1.0740741E1,8.8161064E1,1.2473481E0,1.5489131E2,4.8647324E1,2.1854184E0,3.1254158E1,5.6754654E1,2.8422235E1,4.9883026E1,1.058371E2,1.1597558E0,8.233701E0,3.3845923E0,2.4251404E0,5.2132145E1,4.782066E0,6.488851E0,5.488777E0,1.8407395E0,8.900002E0,6.535033E1,2.2810741E1,1.4197855E2,1.29127655E1,1.3096643E1,3.555068E1,1.0978435E0,1.087575E0,2.8229964E0,2.8431162E1,1.3121896E1,4.363276E1,2.2349907E1,6.072327E0,1.5818323E1,3.40647E1,1.4616847E1,9.1220245E1,1.0368943E0,1.3882462E0,4.7833515E1,4.2986326E0,3.6358695E0,1.1461966E0,4.856517E0,1.6323339E0,1.2821072E0,4.20667E0,1.77222E0,7.1277814E0,1.2025337E0,6.41478E1,1.2139896E1,1.0670844E1,3.8139164E0,1.3816463E2,9.020652E0,3.8921137E0,1.1685887E1,1.4107561E0,2.4645128E1,1.090555E1,1.2918924E0,1.5311041E0,1.1444617E1,1.6986546E1,1.9601864E0,1.116171E1,4.142467E1,2.2080877E0,2.1192816E1,1.1570923E0,2.2782273E0,3.7940998E0,9.630999E0,6.1873255E0,2.2826035E1,1.12386675E1,2.6969683E0,1.1919879E1,8.2704636E1,8.515616E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"103","size_leaf_vector":"1"}},{"base_weights":[-4.3514144E-2,-
3.639623E-1,7.0701174E-2,-1.6860127E-1,-7.498408E-1,9.750429E-2,-5.7888895E-1,-6.8214285E-1,-2.8909592E-2,-3.9658841E-1,-9.3758315E-1,1.1391882E-1,-7.345612E-1,-6.823403E-1,1.8592273E-1,-9.546812E-1,2.0475863E-3,3.3413403E-2,-7.4225116E-1,-7.402887E-1,1.13327794E-1,-2.883082E-1,-9.155629E-2,2.6198468E-1,2.9786343E-2,-3.297001E-1,2.5113383E-1,-8.4852123E-1,-2.3198897E-1,-3.0107749E-1,-7.9418704E-2,-2.1455903E-1,3.2614306E-1,-2.9513618E-1,2.1145238E-1,3.226954E-2,-2.8399193E-1,-7.8865045E-1,-3.6126416E-2,3.2788357E-1,-3.2102248E-1,2.8976384E-1,-2.8991333E-1,-1.06955945E-1,1.7559412E-1,-8.967045E-2,2.1791191E-1,-9.923777E-1,-1.3180283E-1,1.00485906E-1,-2.1498409E-1,3.563566E-1,-1.6205718E-1,2.4474198E-1,-1.3390405E-1,2.0838049E-1,1.532409E-3,-1.3123672E-1,-2.7502474E-1,-2.231753E-1,1.4467794E-1,1.0275661E-1,-8.465177E-2,1.04301125E-1,-8.477427E-2,-2.1885698E-1,6.868952E-2,-1.1744573E-1,-3.3731934E-1,1.4371021E-1,-2.0050626E-1,-9.954521E-2,2.3160547E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":6,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,-1,29,31,33,35,37,39,-1,-1,41,43,-1,45,47,49,-1,-1,-1,51,53,55,-1,-1,57,-1,-1,59,61,-1,63,65,-1,-1,67,69,71,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.3333717E1,1.2500694E1,8.245346E0,8.050152E0,3.3426857E0,6.264545E0,2.7517977E0,4.479372E0,4.0028653E0,3.7940884E0,2.3804855E-1,5.53136E0,3.4692054E0,1.1327095E0,0E0,3.138628E-1,2.055232E0,4.9319324E0,1.3580766E0,2.8811073E-1,4.3467336E0,0E0,0E0,5.773281E0,5.6946154E0,0E0,1.0570973E0,1.1683788E0,1.0653135E0,0E0,0E0,0E0,5.1845694E0,5.3490343E0,5.437109E0,0E0,0E0,1.24135494E-1,0E0,0E0,2.8315477E0,4.870039E0,0E0,1.1877169E1,6.840554E0,0E0,0E0,3.8017082E-1,1.3884541E0,1.5722336E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,15,15,16,16,17,17,18,18,19,19,20,20,23,23,24,24,26,26,27,27,28,28,32,32,33,33,34,34,37,37,40,40,41,41,43,43,44,44,47,47,48,48,49,49],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,-1,30,32,34,36,38,40,-1,-1,42,44,-1,46,48,50,-1,-1,-1,52,54,56,-1,-1,58,-1,-1,60,62,-1,64,66,-1,-1,68,70,72,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.6082309E-1,-2.1057709E-1,1.551015E0,-4.7465166E-1,-1.0747923E0,1.3492355E0,1.4546254E0,6.313125E-1,1.773418E0,-4.1237012E-1,1.9331255E0,-1.0994488E0,6.2878585E-1,1.8130895E0,1.8592273E-1,9.981511E-1,-3.9908516E-1,1.3943407E-1,-9.395118E-1,1.5234454E0,3.9868048E-1,-2.883082E-1,-9.155629E-2,1.1321826E-1,-1.4228727E-1,-3.297001E-1,-2.8642884E-1,4.2232597E-1,3.1303735E0,-3.0107749E-1,-7.9418704E-2,-2.1455903E-1,-1.4562023E-1,-5.491631E-1,5.1763475E-1,3.226954E-2,-2.8399193E-1,7.1816164E-1,-3.6126416E-2,3.2788357E-1,1.0266974E0,2.9675448E-1,-2.8991333E-1,-1.4994395E0,-6.0997343E-1,-8.967045E-2,2.1791191E-1,-2.0551927E0,2.0709887E0,-4.7465166E-1,-2.1498409E-1,3.563566E-1,-1.6205718E-1,2.4474198E-1,-1.3390405E-1,2.0838049E-1,1.532409E-3,-1.3123672E-1,-2.7502474E-1,-2.231753E-1,1.4467794E-1,1.0275661E-1,-8.465177E-2,1.04301125E-1,-8.477427E-2,-2.1885698E-1,6.868952E-2,-1.1744573E-1,-3.3731934E-1,1.4371021E-1,-2.0050626E-1,-9.954521E-2,2.3160547E-1],"split_indices":[10,1,1,0,6,6,2,8,5,10,6,7,2,8,0,4,7,5,1,2,4,0,0,4,5,0,4,2,1,0,0,0
,7,7,6,0,0,7,0,0,4,0,0,6,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[6.358286E2,1.6650035E2,4.6932828E2,1.114917E2,5.5008648E1,4.5154663E2,1.7781624E1,2.3011707E1,8.8479996E1,2.0169853E1,3.4838795E1,4.436575E2,7.889141E0,1.6765373E1,1.0162504E0,1.6173758E1,6.8379498E0,8.224739E1,6.232604E0,1.1778783E1,8.391071E0,3.309234E1,1.7464566E0,1.5998457E2,2.836729E2,5.675719E0,2.2134223E0,1.1600591E1,5.1647835E0,1.4678617E1,1.4951404E0,1.7525953E0,5.085355E0,2.867444E1,5.357295E1,1.3002186E0,4.9323854E0,1.0673734E1,1.1050487E0,2.1118367E0,6.2792344E0,1.5719377E2,2.7908006E0,1.4652472E2,1.371482E2,1.1979647E0,1.0154576E0,9.34108E0,2.2595115E0,3.5333762E0,1.6314071E0,2.3634164E0,2.7219381E0,2.908747E0,2.5765694E1,1.5324607E1,3.824834E1,4.032697E0,6.641037E0,4.1620097E0,2.117225E0,1.4428532E2,1.2908462E1,4.0557396E1,1.0596733E2,6.9313045E0,1.3021689E2,2.396898E0,6.9441814E0,1.1067407E0,1.1527709E0,2.4573326E0,1.0760436E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"73","size_leaf_vector":"1"}},{"base_weights":[-4.0470764E-2,-3.0270293E-1,4.5457512E-2,-1.3681926E-1,-6.8173826E-1,1.2462556E-1,-1.0531642E-1,-6.249758E-1,-2.5111217E-2,-3.365137E-1,-8.9449054E-1,-5.811407E-1,1.5772776E-1,-3.9767465E-1,1.2611942E-1,-9.08469E-1,-1.21503975E-2,-7.226893E-2,8.608265E-1,-6.754985E-1,8.820056E-2,-9.526689E-1,-3.992374E-1,-8.955386E-1,3.7396133E-1,6.0778324E-2,3.9608407E-1,-3.015975E-1,-2.9861182E-1,3.3415005E-1,-1.2529834E-1,-2.9037774E-1,-6.8136916E-2,-5.1535934E-1,3.679797E-1,6.809386E-2,-3.912791E-1,4.033761E-1,-3.1458295E-3,-2.5543766E-2,-7.2943294E-1,6.9797546E-1,-4.5386067E-1,-1.02506794E-1,-2.9688054E-1,5.6343574E-2,-2.1622014E-1,-3.2288784E-1,-3.018656E-1,9.25705E-1,-1.91335E-1,2.0039134E-1,-1.3192837E-1,9.128668E-2,6.252051E-1,-9.005227E-1,-1.5673418E-1,3.3301234E-2,6.762507E-1,-2.911858E-1,8.8640195E-1,-2.3269052E-2,-1.9835198E-1,-1.7095366E-1,2.3201649E-1,-2.1709181E-1,5.066097E-2,3.892342E-2,-2.724139E-1,-1.0988209E-1,-2.6279417E-1,-1.14160694E-1,4.7183162E-1,-2.714295E-1,1.4117171E-1,5.5616513E-2,-2.2150561E-1,4.5703635E-2,3.8087615E-1,2.611885E-2,2.1288876E-1,2.5175926E-1,-5.3226456E-2,1.7949955E-1,-8.3888784E-2,1.06335804E-1,2.9740712E-1,-2.2267012E-2,-3.34619E-1,4.9074624E-2,-1.3708948E-1,3.6908704E-1,-5.4546572E-2,3.3199245E-1,8.8609874E-2,2.0286983E-2,-1.6990721E-1,5.305227E-1,8.559662E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":7,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,41,43,45,47,49,51,53,55,-1,57,59,-1,-1,61,63,65,67,-1,-1,-1,69,71,73,-1,-1,-1,-1,-1,75,77,-1,79,81,83,85,87,89,91,93,95,97,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.3949675E1,9.512329E0,5.5837765E0,5.847252E0,3.096321E0,7.25153E0,1.09587755E1,3.460116E0,3.7611322E0,2.8295815E0,4.2686844E-1,4.521935E0,6.758871E0,3.8043098E0,4.7881193E0,3.3301067E-1,1.5923225E0,3.8319805E0,1.9327831E0,2.8225565E-1,3.431937E0,2.1130562E-1,1.0160823E0,8.3243275E-1,2.9118547E0,5.6719904E0,5.8571587E0,5.3259463E0,0E0,5.116346E0,7.2759476E0,0E0,0E0,1.7675364E-1,2.25
07625E0,4.898978E0,7.1493115E0,0E0,0E0,0E0,1.9952822E-1,5.154827E0,2.7580411E0,0E0,0E0,0E0,0E0,0E0,9.6783394E-1,6.3074994E-1,0E0,6.9835076E0,4.070241E0,7.2224693E0,4.42449E0,2.0587883E0,5.0707645E0,7.3355293E0,3.4945927E0,3.6596384E0,2.7494588E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,29,29,30,30,33,33,34,34,35,35,36,36,40,40,41,41,42,42,48,48,49,49,51,51,52,52,53,53,54,54,55,55,56,56,57,57,58,58,59,59,60,60],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,-1,58,60,-1,-1,62,64,66,68,-1,-1,-1,70,72,74,-1,-1,-1,-1,-1,76,78,-1,80,82,84,86,88,90,92,94,96,98,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.6082309E-1,-2.1057709E-1,-3.1497508E-1,-4.7465166E-1,-1.0747923E0,-1.1001399E0,3.9471725E-1,6.313125E-1,1.8732673E0,-4.1237012E-1,1.6935092E0,5.397121E-2,5.069149E-1,3.479694E-1,-1.2517285E0,9.981511E-1,-6.359675E-1,9.324553E-3,4.5103574E-1,-1.0480549E0,5.2817637E-1,-8.603547E-1,1.8931966E0,1.4323081E0,7.4762195E-1,-2.3947062E-1,-5.4467213E-1,-7.060735E-1,-2.9861182E-1,8.872895E-1,1.0972867E0,-2.9037774E-1,-6.8136916E-2,8.179088E-1,-9.3749535E-1,-1.016626E0,5.353284E-1,4.033761E-1,-3.1458295E-3,-2.5543766E-2,7.1816164E-1,7.9795814E-1,-1.3048095E0,-1.02506794E-1,-2.9688054E-1,5.6343574E-2,-2.1622014E-1,-3.2288784E-1,3.825173E-1,6.4971733E-1,-1.91335E-1,-4.26067E-1,-2.1074744E-2,-9.395118E-1,1.9577834E-1,-5.793705E-1,-8.894858E-2,1.4176449E-1,-1.6940694E0,1.1321826E-1,4.6192405E-1,-2.3269052E-2,-1.9835198E-1,-1.7095366E-1,2.3201649E-1,-2.1709181E-1,5.066097E-2,3.892342E-2,-2.724139E-1,-1.0988209E-1,-2.6279417E-1,-1.14160694E-1,4.7183162E-1,-2.714295E-1,1.4117171E-1,5.5616513E-2,-2.2150561E-1,4.5703635E-2,3.8087615E-1,2.611885E-2,2.1288876E-1,2.5175926E-1,-5.3226456E-2,1.7949955E-1,-8.3888784E-2,1.06335804E-1,2.9740712E-1,-2.2267012E-2,-3.34619E-1,4.9074624E-2,-1.3708948E-1,3.6908704E-1,-5.4546572E-2,3.3199245E-1,8.8609874E-2,2.0286983E-2,-1.6990721E-1,5.305227E-1,8.559662E-2],"split_indices":[10,1,4,0,6,5,9,8,9,10,3,3,8,7,6,4,10,8,0,8,0,0,3,10,3,8,1,0,0,10,5,0,0,8,0,9,6,0,0,0,7,9,6,0,0,0,0,0,8,10,0,8,10,1,5,6,0,4,6,4,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[6.174625E2,1.5177116E2,4.6569138E2,1.0651581E2,4.525536E1,3.0546954E2,1.6022185E2,1.8979956E1,8.753585E1,1.823991E1,2.7015451E1,1.2943679E1,2.9252585E2,7.047586E1,8.974599E1,1.2647016E1,6.332941E0,8.398389E1,3.5519648E0,9.815339E0,8.424571E0,2.3348995E1,3.6664548E0,9.785031E0,3.1586475E0,2.088378E2,8.368805E1,6.2012276E1,8.463585E0,4.893042E1,4.0815567E1,1.1245117E1,1.4018983E0,2.572186E0,3.760755E0,5.886857E1,2.5115314E1,1.9346538E0,1.617311E0,1.0375267E0,8.777813E0,3.829294E0,4.595277E0,1.9153322E0,2.1433664E1,1.4445465E0,2.2219083E0,6.877973E0,2.907058E0,2.0990143E0,1.0596333E0,1.2107846E2,8.7759346E1,3.6513252E1,4.7174793E1,1.1061807E1,5.095047E1,2.661878E1,2.2311642E1,3.567771E1,5.1378565E0,1.0254073E0,1.5467786E0,1.012635E0,2.74812E0,5.9491568E0,5.2919415E1,1.28
79081E1,1.2236234E1,3.5298498E0,5.247963E0,1.9680023E0,1.8612915E0,3.103016E0,1.4922608E0,1.6454724E0,1.2615857E0,1.0902218E0,1.0087926E0,9.999582E1,2.1082638E1,3.1464531E0,8.461289E1,1.5166882E1,2.134637E1,2.8251884E1,1.892291E1,2.5605648E0,8.501242E0,2.4870872E1,2.6079597E1,3.336243E0,2.3282537E1,9.580473E0,1.2731168E1,1.5812848E1,1.9864862E1,1.2952263E0,3.8426301E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"99","size_leaf_vector":"1"}},{"base_weights":[-3.5053827E-2,-2.4576844E-1,3.552281E-2,3.5979297E-2,-4.0154326E-1,6.8996005E-2,-2.6517963E-1,-3.476082E-1,1.9303803E-1,-6.5100366E-1,-2.7127895E-1,8.288349E-2,-6.8285215E-1,4.6386287E-1,-4.573148E-1,3.356881E-1,-6.1086196E-1,-2.548311E-1,3.1612837E-1,-8.0468655E-1,-1.8212593E-1,-5.676978E-1,5.7778873E-2,1.0358966E-1,-4.6050075E-1,-2.7998304E-1,6.961979E-2,-2.9669878E-1,9.980541E-1,-6.294465E-1,5.849672E-1,-1.9252478E-1,8.772215E-1,-7.090686E-1,2.8515875E-2,6.57155E-1,3.260094E-2,-8.36469E-1,-1.80911E-2,2.9884994E-1,-2.474599E-1,-9.1332394E-1,-8.618611E-2,9.8340803E-1,-1.1947378E-1,6.867759E-2,3.6961922E-1,-7.99663E-1,-6.4029545E-2,1.1896343E-2,5.345561E-1,-7.598579E-1,2.8957424E-1,-1.1359004E-1,3.2386547E-1,3.0202175E-2,3.8331017E-1,-2.6973972E-1,-6.482727E-2,3.6531785E-1,5.5279378E-2,-1.5540743E-1,1.9672294E-1,-9.2577934E-2,-2.594296E-1,1.9282916E-1,-1.8319553E-1,7.4305125E-2,-3.0300477E-1,1.7399292E-1,-1.1280874E-1,3.9829236E-1,-4.9488325E-2,2.8585602E-2,-2.1633331E-1,-6.0321357E-2,3.403598E-2,2.0422082E-1,1.8276658E-3,-2.807234E-1,-5.3670842E-2,1.5227258E-1,-2.3080269E-1,-1.8018022E-1,1.9200592E-1,1.2193701E-1,-2.6335078E-1,-1.2480289E-1,7.97965E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":8,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,-1,35,37,39,41,43,45,47,-1,-1,-1,49,51,53,-1,55,57,-1,59,61,63,-1,65,-1,67,69,71,73,75,77,79,81,83,-1,85,-1,87,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[8.925962E0,6.622921E0,4.5482206E0,3.3521278E0,2.9769382E0,4.2932506E0,6.5555506E0,3.1186295E0,5.3152776E0,2.2008753E0,6.4184785E0,4.540307E0,1.9274416E0,8.703096E0,6.8886695E0,3.2889967E0,9.025736E-1,0E0,3.450259E0,5.2309227E-1,3.1321414E0,5.668618E0,5.356823E0,3.5545578E0,1.9443781E0,0E0,0E0,0E0,5.9822626E0,7.320627E0,2.0814922E0,0E0,1.0333953E0,8.077073E-1,0E0,4.0826664E0,7.4276743E0,7.9257965E-2,0E0,2.2920997E0,0E0,2.4953403E0,3.21757E0,2.2906537E0,3.6468258E0,4.1531196E0,5.0274043E0,5.0089264E-1,3.5872548E0,2.0949543E0,0E0,4.462679E0,0E0,4.8430476E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,18,18,19,19,20,20,21,21,22,22,23,23,24,24,28,28,29,29,30,30,32,32,33,33,35,35,36,36,37,37,39,39,41,41,42,42,43,43,44,44,45,45,46,46,47,47,48,48,49,49,51,51,53,53],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,-1,36,38,40,42,44,46,48,-1,-1,-1,50,52,54,-1,56,58,-1,60,62,64,-1,66,-1,68,70,72,74,76,78,80,82,84,-1,86,-1,88,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-7.697409E-2,-7.572781E-1,7.6534563E-1,-7.02738
64E-1,-2.432298E-1,1.5690756E0,-8.1500554E-1,-6.525501E-1,-3.3469358E-1,8.8010764E-1,1.9577834E-1,1.551015E0,1.21692E0,-7.992597E-1,1.4398967E0,-7.826177E-1,1.5372332E0,-2.548311E-1,9.406206E-3,1.5085721E0,3.479694E-1,5.963377E-1,2.8029475E-1,1.4028195E0,6.6354454E-1,-2.7998304E-1,6.961979E-2,-2.9669878E-1,-5.037827E-2,2.5622377E0,3.4227094E-1,-1.9252478E-1,-4.5429462E-1,-1.7224395E-1,2.8515875E-2,-1.0480549E0,-1.4293733E-1,-1.1724524E0,-1.80911E-2,3.968685E-1,-2.474599E-1,-1.9863726E0,4.5103574E-1,-2.3947062E-1,1.4200101E0,-8.0938774E-1,-1.0924859E0,-1.1411514E-1,1.3356128E0,1.1818031E0,5.345561E-1,-1.2346513E0,2.8957424E-1,8.7150747E-1,3.2386547E-1,3.0202175E-2,3.8331017E-1,-2.6973972E-1,-6.482727E-2,3.6531785E-1,5.5279378E-2,-1.5540743E-1,1.9672294E-1,-9.2577934E-2,-2.594296E-1,1.9282916E-1,-1.8319553E-1,7.4305125E-2,-3.0300477E-1,1.7399292E-1,-1.1280874E-1,3.9829236E-1,-4.9488325E-2,2.8585602E-2,-2.1633331E-1,-6.0321357E-2,3.403598E-2,2.0422082E-1,1.8276658E-3,-2.807234E-1,-5.3670842E-2,1.5227258E-1,-2.3080269E-1,-1.8018022E-1,1.9200592E-1,1.2193701E-1,-2.6335078E-1,-1.2480289E-1,7.97965E-2],"split_indices":[10,1,6,3,0,4,9,7,2,8,5,1,1,8,8,7,9,0,2,7,7,9,5,9,9,0,0,0,0,5,10,0,10,4,0,8,9,8,0,1,0,2,0,8,6,6,6,5,9,5,0,8,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[5.986176E2,1.4957176E2,4.4904587E2,5.352709E1,9.604467E1,4.047599E2,4.4285973E1,1.5197271E1,3.832982E1,3.1576704E1,6.4467964E1,3.9829272E2,6.4671564E0,8.941994E0,3.534398E1,4.1501284E0,1.1047143E1,3.4246757E0,3.4905144E1,2.3242403E1,8.334302E0,3.3534496E1,3.0933468E1,3.8445218E2,1.384055E1,5.0616527E0,1.4055039E0,2.1750484E0,6.766945E0,3.067425E1,4.6697297E0,1.4136003E0,2.7365282E0,9.694043E0,1.3531E0,1.5247581E1,1.9657562E1,2.217203E1,1.0703728E0,5.073063E0,3.2612393E0,1.8999786E1,1.4534712E1,4.2406025E0,2.6692865E1,3.409643E2,4.3487885E1,6.911306E0,6.929244E0,3.4461985E0,3.3207467E0,2.8839327E1,1.8349233E0,2.2550297E0,2.4147E0,1.3568724E0,1.3796558E0,6.4019437E0,3.2920992E0,6.2562547E0,8.991325E0,1.0525567E1,9.131994E0,1.7337737E0,2.0438255E1,3.8939176E0,1.1791452E0,1.3406477E0,1.7659138E1,4.103168E0,1.0431543E1,3.1425018E0,1.0981008E0,2.0291338E1,6.401528E0,4.8042725E1,2.9292157E2,2.296295E1,2.0524933E1,5.252457E0,1.658849E0,3.9822628E0,2.9469812E0,1.7669872E0,1.6792113E0,2.4246256E0,2.6414701E1,1.2016593E0,1.0533702E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"89","size_leaf_vector":"1"}},{"base_weights":[-3.0580195E-2,-1.3082279E-2,-8.0488217E-1,-2.8945968E-1,-2.1421276E-3,-9.85745E-3,-9.381571E-1,-2.973552E-1,3.5419554E-2,-3.028496E-1,-1.2124219E-1,-4.8792684E-1,3.2771963E-1,5.824098E-2,-4.7193578E-1,8.9802735E-2,-5.8297503E-1,6.798931E-1,-2.621614E-1,7.666724E-2,-4.0928277E-1,-6.652461E-1,5.3782433E-1,-1.5836501E-1,4.10579E-1,-1.8649286E-2,-2.2583291E-1,-1.6460766E-1,2.6955798E-1,4.960261E-2,-2.777459E-1,1.541079E-1,1.7493475E-2,-1.955649E-1,1.8983483E-1,6.1028495E-2,-2.697777E-1,2.870523E-1,-1.03943855E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":9,"left_children":[1,3,5,-1,7,-1,9,11,13,-1,-1,15,17,19,21,23,25,27,29,31,33,35,37,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[7.8819838E0,5.940159E0,1.2920246E0,0E0
,6.277764E0,0E0,8.339882E-3,7.7719955E0,5.8432746E0,0E0,0E0,2.7635489E0,3.404152E0,4.1798654E0,4.599584E0,6.9166822E0,3.714449E0,3.179576E0,2.0048473E0,3.6933334E0,5.0685425E0,3.9814758E0,1.8302364E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,6,6,7,7,8,8,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22],"right_children":[2,4,6,-1,8,-1,10,12,14,-1,-1,16,18,20,22,24,26,28,30,32,34,36,38,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.1349235E0,-1.9810368E0,-8.4839493E-1,-2.8945968E-1,-5.411535E-1,-9.85745E-3,1.4023165E0,9.995786E-1,8.8282496E-1,-3.028496E-1,-1.2124219E-1,-1.6232948E0,-5.3669715E-1,1.8675451E0,1.8675451E0,-1.2128284E0,-1.2662021E0,-4.5429462E-1,1.8130895E0,-1.2089357E0,1.3951799E0,-6.004373E-1,-3.0166942E-1,-1.5836501E-1,4.10579E-1,-1.8649286E-2,-2.2583291E-1,-1.6460766E-1,2.6955798E-1,4.960261E-2,-2.777459E-1,1.541079E-1,1.7493475E-2,-1.955649E-1,1.8983483E-1,6.1028495E-2,-2.697777E-1,2.870523E-1,-1.03943855E-1],"split_indices":[3,8,1,0,2,0,6,9,1,0,0,6,5,2,2,5,7,10,8,10,7,4,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[5.815304E2,5.6967365E2,1.1856767E1,5.4827695E0,5.6419086E2,1.929023E0,9.927743E0,6.2901897E1,5.0128897E2,7.9723015E0,1.9554418E0,4.8373337E1,1.4528562E1,4.8055814E2,2.073085E1,6.8418493E0,4.1531487E1,9.00172E0,5.526841E0,4.6314062E2,1.74175E1,1.7686163E1,3.0446877E0,5.0093994E0,1.8324499E0,1.0547259E1,3.0984228E1,1.1318394E0,7.869881E0,3.8181782E0,1.7086627E0,1.758068E1,4.4555994E2,1.4432049E1,2.9854507E0,3.7811017E0,1.3905061E1,2.0101671E0,1.0345206E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"39","size_leaf_vector":"1"}},{"base_weights":[-2.5991203E-2,-2.7634355E-1,1.3005353E-2,-5.797192E-1,6.640891E-2,3.5925966E-2,-3.5217765E-1,-1.7334577E-1,-8.041141E-1,2.7112257E-1,-6.21821E-1,4.9394578E-2,-5.510431E-1,-7.078911E-1,5.0373554E-1,-5.1215744E-1,2.2020571E-1,-2.805406E-2,-8.33639E-1,3.7756312E-1,-2.2590642E-1,5.5309635E-2,-7.507828E-1,-4.2066595E-1,6.489851E-2,-7.195962E-1,4.9774926E-2,-8.319371E-1,2.8692257E-1,-1.1161335E-1,1.084785E0,-6.550065E-1,5.4395746E-2,5.482764E-1,-2.2442836E-1,-5.9623357E-2,-8.6432153E-1,1.02141455E-1,7.645329E-1,-2.4146138E-1,-2.755919E-1,-6.450483E-1,5.4726962E-2,-1.6075787E-1,9.48438E-2,-2.5454888E-1,-4.9261168E-2,3.77473E-2,-9.202122E-1,1.7823185E-1,-5.265236E-2,2.6535884E-1,-6.822316E-1,4.1461077E-1,1.762608E-2,-2.4968627E-1,-4.7999486E-2,-1.1224032E-1,2.841979E-1,-4.5383163E-2,-2.6949826E-1,1.0283E-1,-2.643519E-1,4.437042E-1,1.3298206E-1,8.7000564E-2,-1.8211578E-1,-2.3718995E-1,6.176281E-2,1.8768857E-1,-2.821591E-1,2.3334255E-2,-1.9666606E-1,4.534888E-1,2.5482636E-2,2.9690675E-2,-3.0349147E-1,-4.8351657E-2,-2.41259E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":10,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,-1,35,37,-1,-1,39,41,43,45,-1,47,49,51,53,55,-1,57,-1,-1,59,61,63,65,-1,67,69,71,73,-1,-1,-1,75,-1,-1,-1,77,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[5.607746E0,8.087648E0,4.177825E0,3.61444E0,5.415721E0,3.7360578E0,9.332337E0,2.2354815E0,4.7270203E-1,3.489323E0,1.0576749E0,3.3762698E0,1.4010386E0,2.8599634E0,3.4245
877E0,9.959996E-1,2.971899E0,0E0,3.1664658E-1,2.8314326E0,0E0,0E0,4.4670677E-1,1.6146936E0,3.027166E0,4.8882437E-1,0E0,1.7342567E0,5.444253E-1,3.6694634E0,1.396595E0,5.302346E-1,0E0,2.7839603E0,0E0,0E0,4.367981E-1,4.348616E0,1.9144111E0,8.1623423E-1,0E0,1.4185514E0,3.8097663E0,6.258134E0,5.4007425E0,0E0,0E0,0E0,1.7445345E0,0E0,0E0,0E0,1.2420952E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,18,18,19,19,22,22,23,23,24,24,25,25,27,27,28,28,29,29,30,30,31,31,33,33,36,36,37,37,38,38,39,39,41,41,42,42,43,43,44,44,48,48,52,52],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,-1,36,38,-1,-1,40,42,44,46,-1,48,50,52,54,56,-1,58,-1,-1,60,62,64,66,-1,68,70,72,74,-1,-1,-1,76,-1,-1,-1,78,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-5.800681E-1,1.5463712E0,9.953629E-1,-7.572781E-1,2.1349235E0,1.9762269E0,4.9114594E-1,1.4704613E-1,-1.9863726E0,1.6889015E0,-9.395118E-1,-9.1293585E-1,1.3065345E0,1.4398967E0,1.4247327E-1,1.0127703E0,6.768775E-1,-2.805406E-2,-8.603547E-1,-6.126634E-1,-2.2590642E-1,5.5309635E-2,-9.236573E-1,4.447161E-1,-1.1001399E0,1.2265209E0,4.9774926E-2,-1.0610008E0,-2.5788262E-1,-8.2178444E-1,-7.573051E-2,5.069149E-1,5.4395746E-2,-9.236573E-1,-2.2442836E-1,-5.9623357E-2,-4.1909558E-1,1.1818031E0,-7.4779874E-1,4.2232597E-1,-2.755919E-1,6.277343E-2,1.5020956E0,-1.4109713E0,-1.6940694E0,-2.5454888E-1,-4.9261168E-2,3.77473E-2,-7.146222E-1,1.7823185E-1,-5.265236E-2,2.6535884E-1,-7.21471E-2,4.1461077E-1,1.762608E-2,-2.4968627E-1,-4.7999486E-2,-1.1224032E-1,2.841979E-1,-4.5383163E-2,-2.6949826E-1,1.0283E-1,-2.643519E-1,4.437042E-1,1.3298206E-1,8.7000564E-2,-1.8211578E-1,-2.3718995E-1,6.176281E-2,1.8768857E-1,-2.821591E-1,2.3334255E-2,-1.9666606E-1,4.534888E-1,2.5482636E-2,2.9690675E-2,-3.0349147E-1,-4.8351657E-2,-2.41259E-1],"split_indices":[10,3,6,1,3,1,2,2,2,5,1,3,10,8,0,5,6,0,0,8,0,0,8,8,5,4,0,1,4,0,9,8,0,8,0,0,7,5,9,2,0,1,8,6,6,0,0,0,4,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[5.730267E2,7.6408516E1,4.9661813E2,4.016578E1,3.6242733E1,4.6813724E2,2.8480886E1,1.4919484E1,2.5246296E1,2.8400944E1,7.841789E0,4.5853162E2,9.605635E0,2.0241539E1,8.239347E0,7.855364E0,7.0641203E0,1.1733594E0,2.4072937E1,2.6298994E1,2.1019492E0,1.0221775E0,6.8196115E0,1.3806371E1,4.4472525E2,7.7742705E0,1.8313645E0,1.8142971E1,2.0985694E0,4.3939414E0,3.845406E0,6.5596232E0,1.2957408E0,5.6004133E0,1.463707E0,1.4544458E0,2.261849E1,1.6102303E1,1.01966915E1,2.2948716E0,4.5247397E0,9.136927E0,4.669444E0,5.1590473E1,3.9313477E2,5.8815007E0,1.8927698E0,1.4951901E0,1.664778E1,1.0898538E0,1.0087156E0,1.3999276E0,2.9940138E0,2.7125752E0,1.1328307E0,4.328179E0,2.2314441E0,1.7100469E0,3.8903663E0,1.2776204E0,2.134087E1,1.3459957E1,2.642346E0,2.0449417E0,8.15175E0,1.0195302E0,1.2753414E0,7.8625712E0,1.2743553E0,3.2033322E0,1.4661119E0,3.5377796E1,1.6212673E1,1.6758884E0,3.914589E2,1.3636658E0,1.5284115E1,1.0082077E0,1.9858062E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"79","size_leaf_vector":"1"}},{"base_weights":[-2.3886928E-2,2.7802838E-2,-1.7389923E-1,1.9541161E-2,3.2453018E-1,-4.124936E-1,-3.5905015E-2,2.9589469E-2,-7.151794E-1,2.8029996E-1,-6.092557E-1,3.302534E-2,-6.5
37239E-1,4.0227943E-3,4.1149566E-1,-5.2220628E-2,-3.369853E-1,-2.4083436E-1,5.784223E-1,2.1672702E-1,-6.896501E-1,1.0988358E-1,-5.8100057E-1,1.4589392E-2,-8.2137513E-1,7.4393824E-2,-1.356114E-1,-5.908576E-1,8.4862334E-1,-1.3071096E-1,1.14643864E-1,1.1291687E0,1.6036939E-2,-8.793303E-1,-3.465691E-1,4.6104994E-2,9.982649E-1,-8.745806E-1,1.3012215E-1,-2.746646E-1,-8.239768E-2,3.3381958E-2,-1.3101967E-1,-5.834802E-2,1.5256947E-1,1.7211482E-1,-2.8048798E-1,4.033171E-1,3.2385837E-2,1.0786931E-1,3.7796465E-1,-2.7806786E-1,2.2591193E-1,-2.8259304E-1,7.597515E-2,1.9352491E-1,-2.3239402E-1,3.8328443E-2,-1.8942381E-1,5.298795E-1,8.536245E-2,-2.9929984E-1,-3.1901736E-2,-9.553953E-2,1.9270332E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":11,"left_children":[1,3,5,7,-1,9,11,13,15,17,19,21,23,25,27,29,-1,-1,31,-1,33,35,37,-1,39,41,43,45,47,-1,-1,49,51,53,55,57,59,61,63,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[4.3703628E0,3.6323624E0,4.740024E0,3.102223E0,0E0,7.371396E0,3.9647748E0,4.027636E0,1.4754939E0,4.458162E0,4.936301E0,4.03988E0,1.1399448E0,3.8205173E0,1.1842033E1,7.010851E-1,0E0,0E0,3.166063E0,0E0,2.2726212E0,4.2441936E0,2.1037314E0,0E0,1.397028E-1,4.921049E0,5.0411263E0,3.9223127E0,6.6235666E0,0E0,0E0,6.8041325E-2,4.9377365E0,2.0020218E0,7.1862593E0,4.0471354E0,2.2509875E0,5.4409313E-1,1.0445532E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,18,18,20,20,21,21,22,22,24,24,25,25,26,26,27,27,28,28,31,31,32,32,33,33,34,34,35,35,36,36,37,37,38,38],"right_children":[2,4,6,8,-1,10,12,14,16,18,20,22,24,26,28,30,-1,-1,32,-1,34,36,38,-1,40,42,44,46,48,-1,-1,50,52,54,56,58,60,62,64,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.24572E-1,1.7986078E0,6.7480576E-1,1.3256642E0,3.2453018E-1,-1.6586821E0,2.1349235E0,7.055825E-1,3.0531117E-1,1.9309677E-1,-1.3039792E0,1.066704E0,-8.4839493E-1,-6.7923063E-1,-1.0164239E-1,-2.9627237E-1,-3.369853E-1,-2.4083436E-1,3.825173E-1,2.1672702E-1,4.822475E-1,8.8010764E-1,1.1049571E0,1.4589392E-2,1.4023165E0,-1.7224395E-1,1.4742326E0,-6.35789E-1,3.5839206E-1,-1.3071096E-1,1.14643864E-1,3.7022063E-1,-1.3255169E0,1.5119832E0,7.47652E-1,5.691137E-1,-8.747628E-1,1.9996859E0,-1.6608919E-1,-2.746646E-1,-8.239768E-2,3.3381958E-2,-1.3101967E-1,-5.834802E-2,1.5256947E-1,1.7211482E-1,-2.8048798E-1,4.033171E-1,3.2385837E-2,1.0786931E-1,3.7796465E-1,-2.7806786E-1,2.2591193E-1,-2.8259304E-1,7.597515E-2,1.9352491E-1,-2.3239402E-1,3.8328443E-2,-1.8942381E-1,5.298795E-1,8.536245E-2,-2.9929984E-1,-3.1901736E-2,-9.553953E-2,1.9270332E-1],"split_indices":[7,3,7,3,0,6,3,3,6,9,1,8,1,7,6,6,0,0,8,0,6,8,3,0,6,4,10,1,6,0,0,10,5,4,6,8,5,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[5.6199603E2,4.1858038E2,1.4341565E2,4.163354E2,2.2449746E0,5.1823215E1,9.159244E1,4.1166504E2,4.670355E0,1.136894E1,4.0454277E1,8.325066E1,8.341772E0,3.8678247E2,2.4882582E1,2.2093177E0,2.461037E0,2.0885236E0,9.2804165E0,1.8485379E0,3.8605736E1,7.471905E1,8.531614E0,1.744273E0,6.5974994E0,2.574855E2,1.2929697E2
,7.4493065E0,1.7433275E1,1.1717536E0,1.0375642E0,4.1795473E0,5.100869E0,2.3854303E1,1.4751434E1,7.069558E1,4.023472E0,5.8759336E0,2.6556802E0,5.0525503E0,1.5449487E0,2.4088702E2,1.6598463E1,1.191078E2,1.018917E1,1.5468595E0,5.902447E0,9.9548855E0,7.4783897E0,1.1513715E0,3.0281758E0,2.1247668E0,2.9761024E0,2.276348E1,1.0908252E0,4.29501E0,1.0456424E1,6.381574E1,6.879838E0,1.2276951E0,2.7957768E0,4.807115E0,1.0688186E0,1.617444E0,1.0382363E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"65","size_leaf_vector":"1"}},{"base_weights":[-1.9435674E-2,-2.6340255E-1,-1.0638147E-2,4.4065657E-1,-2.4450403E-2,1.0472703E0,1.1959169E-1,-1.5505508E-1,1.8819874E-2,4.3130435E-2,3.9907566E-1,-4.1601643E-1,4.256036E-1,-2.7390587E-1,4.6818894E-1,-1.0864724E-1,1.0056548E-1,-2.7124703E-1,6.891003E-2,-1.1774614E-1,7.273282E-1,-4.3873814E-1,1.31491935E-2,8.4381825E-1,9.09445E-2,-6.7158835E-4,-4.4822964E-1,2.3255429E-1,-7.767785E-2,2.9049024E-1,-1.6958237E-2,-1.5532643E-1,2.1313025E-1,1.3407168E-1,-7.611371E-2,-2.1689856E-2,2.9044577E-1,-2.5424385E-1,8.808267E-2,-2.731179E-2,1.2928927E-1,-5.8411654E-2,-2.7205974E-1,-4.321893E-3,1.4703049E-1,4.188346E-3,-1.182599E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":12,"left_children":[1,-1,3,5,7,9,11,13,15,-1,-1,17,19,21,23,25,27,-1,-1,-1,29,31,33,35,37,39,41,43,45,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[4.160755E0,0E0,3.4235632E0,3.0516527E0,3.010477E0,1.2191739E0,2.1103191E0,9.946474E0,4.1855545E0,0E0,0E0,1.7188848E0,2.2603643E0,5.3028154E0,2.9837031E0,5.7572007E0,5.772976E0,0E0,0E0,0E0,1.2249956E0,6.82798E0,4.952604E0,1.2889023E0,2.5113358E0,4.7254806E0,4.228063E0,8.962204E0,3.045965E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,2,2,3,3,4,4,5,5,6,6,7,7,8,8,11,11,12,12,13,13,14,14,15,15,16,16,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28],"right_children":[2,-1,4,6,8,10,12,14,16,-1,-1,18,20,22,24,26,28,-1,-1,-1,30,32,34,36,38,40,42,44,46,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.9810368E0,-2.6340255E-1,-1.2432345E0,-1.1661501E0,-6.289329E-1,-6.86076E-1,1.3943407E-1,1.9354022E0,-3.113201E-1,4.3130435E-2,3.9907566E-1,9.423064E-1,-6.625513E-2,-2.8234428E-1,2.186949E0,-1.3702966E-1,-3.0570334E-1,-2.7124703E-1,6.891003E-2,-1.1774614E-1,5.53022E-1,1.7388537E0,7.499093E-2,-6.9653356E-1,-8.858995E-1,-2.1928513E-1,1.2588996E-1,-3.445578E-1,1.1254588E0,2.9049024E-1,-1.6958237E-2,-1.5532643E-1,2.1313025E-1,1.3407168E-1,-7.611371E-2,-2.1689856E-2,2.9044577E-1,-2.5424385E-1,8.808267E-2,-2.731179E-2,1.2928927E-1,-5.8411654E-2,-2.7205974E-1,-4.321893E-3,1.4703049E-1,4.188346E-3,-1.182599E-1],"split_indices":[8,0,1,7,0,4,5,10,5,0,0,8,6,3,10,6,7,0,0,0,6,9,3,1,4,3,9,9,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[5.5108795E2,4.587429E0,5.4650055E2,1.5311662E1,5.311889E2,4.5164685E0,1.0795193E1,1.3154802E2,3.9964087E2,1.4368745E0,3.0795941E0,3.7944117E0,7.000781E0,1.10950584E2,2.0597433E1,1.5602293E2,2.4361794E2,1.9329374E0,1.8614744E0,1.8052486E0,5.1955323E0,7.0144135E1,4.0806458E1,9.701087E0,1.0896346E1,1.1914185E2,3.688108E1,1.397952E2,1.0382274E2,3.7939062E0,1.4016262E0,6.6145454E1,3.9986777E0,1.5285978E1,2.5520477E1,1.2092943E0,8.491793E0,1.3701712E0,9.526175E0,9.9168
846E1,1.9973007E1,2.4669077E1,1.2212003E1,7.184628E1,6.7948906E1,8.125405E1,2.2568684E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"47","size_leaf_vector":"1"}},{"base_weights":[-1.4978629E-2,-5.826879E-3,-5.9560806E-1,-2.3293777E-1,1.0907516E-3,4.370697E-3,-7.653596E-1,1.1222622E-2,-5.235992E-1,-2.615065E-1,-6.134334E-2,1.9571953E-2,-5.513432E-1,-6.422321E-1,8.292271E-2,-3.170528E-2,1.1452009E-1,1.2627023E-1,-9.164351E-1,-7.473569E-1,-1.5116787E-2,-3.1429825E-3,-2.4617101E-1,4.4424787E-2,-2.6088282E-1,-1.6822883E-1,1.961868E-1,-7.187529E-2,-3.0789953E-1,-2.6216766E-1,-6.8021394E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":13,"left_children":[1,3,5,-1,7,-1,9,11,13,-1,-1,15,17,19,-1,21,23,25,27,29,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.884505E0,2.8585825E0,8.836477E-1,0E0,2.8356018E0,0E0,2.1589589E-1,2.4682364E0,1.1365545E0,0E0,0E0,2.518321E0,2.0170805E0,5.285754E-1,0E0,5.620977E0,6.118428E0,1.6609542E0,1.3463163E-1,3.0620098E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,6,6,7,7,8,8,11,11,12,12,13,13,15,15,16,16,17,17,18,18,19,19],"right_children":[2,4,6,-1,8,-1,10,12,14,-1,-1,16,18,20,-1,22,24,26,28,30,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.1349235E0,-1.9810368E0,-8.4839493E-1,-2.3293777E-1,1.9762269E0,4.370697E-3,1.4023165E0,1.883084E0,1.9687227E0,-2.615065E-1,-6.134334E-2,2.603036E-1,-1.4983252E-1,1.4742326E0,8.292271E-2,1.773418E0,1.3138483E0,-6.126634E-1,-4.26067E-1,9.995786E-1,-1.5116787E-2,-3.1429825E-3,-2.4617101E-1,4.4424787E-2,-2.6088282E-1,-1.6822883E-1,1.961868E-1,-7.187529E-2,-3.0789953E-1,-2.6216766E-1,-6.8021394E-2],"split_indices":[3,8,1,0,1,0,6,4,4,0,0,9,1,10,0,5,6,8,8,9,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[5.424885E2,5.35065E2,7.4235377E0,3.7706134E0,5.312944E2,1.8147037E0,5.6088343E0,5.221894E2,9.104973E0,4.2491636E0,1.3596704E0,5.155037E2,6.685649E0,8.10091E0,1.0040625E0,3.351632E2,1.8034055E2,2.5699477E0,4.115701E0,6.6559076E0,1.4450028E0,3.273669E2,7.7962966E0,1.752146E2,5.125948E0,1.0891804E0,1.4807675E0,1.0191109E0,3.0965903E0,4.8086195E0,1.8472884E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"31","size_leaf_vector":"1"}},{"base_weights":[-1.4860475E-2,3.8013917E-1,-2.6619973E-2,-2.2649235E-1,8.133811E-1,-5.6855935E-1,-1.3465629E-3,-5.270542E-1,2.2281367E-1,-1.2852849E-1,1.1217066E0,-6.993325E-1,2.9005206E-1,3.9303422E-1,-2.0341074E-2,1.8403584E-1,-8.903054E-1,3.6687502E-1,4.434875E-1,-8.0671245E-1,1.6334006E-1,2.2230382E-1,-1.4063193E-1,-5.954363E-1,5.606021E-1,-2.2693175E-1,2.5914488E-3,-1.1299916E-1,-3.326683E-1,-1.0285425E-1,2.6504594E-1,-2.6258367E-1,-1.3053608E0,-2.7673516E-1,4.505936E-2,1.443521E0,1.6862266E-1,-5.5775493E-1,-5.690975E-3,3.8869318E-1,-1.24364635E-2,-1.9997384E-1,3.6304116E-1,-4.626001E-1,-2.140601E-2,1.1951822E-1,4.9745932E-1,1.8127464E-1,-1.22557536E-1,-2.6215374E-1,7.888639E-2,-6.8884864E-2,2.5564066E-1,2.8547794E-1,-5.2734304E-2,-8.872898E-2,2.3308033E-3],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":14,"left_children":[1,3,5,7,9,11,13,15,-1,-1,17,19,21,23,25,-1,27,29,-1,31,-1,-1,-1,33,35,37,39,-1,-1,-1,-1,41,43,-1,-1,45,47,49,51,53,55,-1,-1,-1,-1,-1,-1,-1,-1
,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.518026E0,4.303775E0,7.19328E0,2.5216708E0,4.0650554E0,2.8303738E0,3.7748058E0,3.1009364E0,0E0,0E0,1.5506926E0,3.1906805E0,1.6476865E0,4.1690383E0,2.277906E0,0E0,1.5323925E-1,1.7053337E0,0E0,4.9120026E0,0E0,0E0,0E0,1.0751605E0,6.7867804E0,3.5128312E0,2.5209863E0,0E0,0E0,0E0,0E0,7.0615234E0,2.7674637E0,0E0,0E0,7.1098614E-1,4.0684767E0,5.2213044E0,5.91938E0,5.3392143E0,2.397402E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,10,10,11,11,12,12,13,13,14,14,16,16,17,17,19,19,23,23,24,24,25,25,26,26,31,31,32,32,35,35,36,36,37,37,38,38,39,39,40,40],"right_children":[2,4,6,8,10,12,14,16,-1,-1,18,20,22,24,26,-1,28,30,-1,32,-1,-1,-1,34,36,38,40,-1,-1,-1,-1,42,44,-1,-1,46,48,50,52,54,56,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-9.42992E-1,-3.9281076E-1,-8.2880706E-1,1.3943407E-1,-7.0572996E-1,1.9354022E0,-7.717146E-1,-7.0572996E-1,2.2281367E-1,-1.2852849E-1,-9.494192E-1,-3.222801E-2,-8.908796E-2,3.4227094E-1,-5.411535E-1,1.8403584E-1,-1.0000844E0,-2.0780419E-1,4.434875E-1,-5.135612E-1,1.6334006E-1,2.2230382E-1,-1.4063193E-1,-5.976401E-1,8.872895E-1,6.313125E-1,-4.7233352E-1,-1.1299916E-1,-3.326683E-1,-1.0285425E-1,2.6504594E-1,-6.712091E-1,3.968685E-1,-2.7673516E-1,4.505936E-2,-6.8171895E-1,-3.3206618E-1,1.0410126E0,6.533265E-2,-4.0061373E-1,-3.3469358E-1,-1.9997384E-1,3.6304116E-1,-4.626001E-1,-2.140601E-2,1.1951822E-1,4.9745932E-1,1.8127464E-1,-1.22557536E-1,-2.6215374E-1,7.888639E-2,-6.8884864E-2,2.5564066E-1,2.8547794E-1,-5.2734304E-2,-8.872898E-2,2.3308033E-3],"split_indices":[4,1,4,5,5,10,4,5,0,0,9,7,1,10,2,0,4,6,0,3,0,0,0,3,10,8,2,0,0,0,0,3,1,0,0,3,1,10,0,4,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[5.389156E2,1.4674525E1,5.242411E2,6.3126616E0,8.361864E0,2.2400896E1,5.0184018E2,5.1666284E0,1.1460336E0,1.5857159E0,6.776148E0,1.9601017E1,2.7998788E0,2.2154612E1,4.7968558E2,1.0816782E0,4.08495E0,2.8249826E0,3.9511652E0,1.8366785E1,1.2342327E0,1.7709175E0,1.0289614E0,2.841197E0,1.9313414E1,4.7038387E1,4.326472E2,2.0323894E0,2.0525606E0,1.3317597E0,1.4932228E0,9.512866E0,8.853918E0,1.8101712E0,1.0310258E0,5.113286E0,1.4200129E1,1.8241175E1,2.8797213E1,1.5277151E1,4.1737003E2,7.894605E0,1.6182605E0,7.2198215E0,1.6340969E0,1.3579683E0,3.7553177E0,8.065445E0,6.134684E0,1.3122417E1,5.1187563E0,2.3416853E1,5.380359E0,7.3058615E0,7.97129E0,2.6875998E1,3.9049402E2],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"57","size_leaf_vector":"1"}},{"base_weights":[-9.995534E-3,-5.386935E-1,-2.6755228E-3,-7.656626E-1,4.3272704E-2,-5.9907407E-2,7.6967925E-2,-2.7146626E-1,-4.008857E-2,-2.1003397E-1,2.3155296E-2,1.9808657E-1,-8.504907E-2,-2.6812357E-1,6.0188425E-1,5.3770296E-2,-6.567639E-1,2.2104299E-1,-2.5313374E-1,4.4446904E-2,-3.059419E-1,-2.6218245E-2,-4.0522856E-1,-1.8175068E-1,9.8023903E-1,-4.2565398E-2,2.5642487E-1,2.4532184E-1,-1.1089513E0,8.536583E-2,3.9667797E-1,2.0834368E-1,-4.1761324E-1,-6.928981E-1,-1.0181157E-1,-7.42418E-2,9.144118E-2,9.48715E-2,-1.4757013E-1,8.7464884E-2,-1.8613626E-1,5.3904403E-2,3.4712327E-1,3.1182253E-1,-2.7566474E-2,3.2097705E-2,2.1792111E-1,-3.73501E-1,-4.888961E-2,8.073518E-2,-1.09877266E-1,-6.87384E-2,1.7825581E-1,1.4917682E-1,-6.9402163E-3,-2.2060652E-1,1.2283944E-2,3.2612853E-2,-2.5518024E-1,3.6911674E-2,-1.5826033E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"cat
egories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":15,"left_children":[1,3,5,7,-1,9,11,-1,-1,13,15,17,19,21,23,25,27,29,-1,31,33,35,37,39,41,43,45,-1,47,49,51,53,55,57,59,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.0655925E0,1.2402627E0,2.4104815E0,3.74429E-1,0E0,3.8416455E0,4.3562403E0,0E0,0E0,5.335122E0,4.1773543E0,3.2201862E0,2.7395103E0,3.4076514E0,2.4411461E0,3.7463555E0,6.67387E0,2.94239E0,0E0,4.702742E0,2.7214253E0,2.8843472E0,4.2646E0,8.7208307E-1,5.429411E-1,7.0621758E0,4.2619176E0,0E0,7.036772E-1,6.051861E0,6.8302717E0,3.0649772E0,2.4038486E0,1.6138101E0,2.3921156E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,5,5,6,6,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,28,28,29,29,30,30,31,31,32,32,33,33,34,34],"right_children":[2,4,6,8,-1,10,12,-1,-1,14,16,18,20,22,24,26,28,30,-1,32,34,36,38,40,42,44,46,-1,48,50,52,54,56,58,60,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-2.1240125E0,1.7677041E0,1.9592093E-1,6.533265E-2,4.3272704E-2,-6.6069925E-1,-3.1497508E-1,-2.7146626E-1,-4.008857E-2,1.0972867E0,1.3372214E-1,1.8675451E0,-5.976401E-1,-7.572781E-1,-6.126634E-1,1.0410126E0,-6.2121356E-1,-5.9036255E-1,-2.5313374E-1,5.599659E-1,-1.1515152E-1,-4.2915997E-1,-7.717146E-1,-4.862524E-1,-4.5770618E-1,-6.186598E-1,5.599659E-1,2.4532184E-1,-1.4369774E-1,7.8226164E-2,-3.6766437E-1,-2.7132162E-1,1.4028195E0,3.2031852E-1,3.968685E-1,-7.42418E-2,9.144118E-2,9.48715E-2,-1.4757013E-1,8.7464884E-2,-1.8613626E-1,5.3904403E-2,3.4712327E-1,3.1182253E-1,-2.7566474E-2,3.2097705E-2,2.1792111E-1,-3.73501E-1,-4.888961E-2,8.073518E-2,-1.09877266E-1,-6.87384E-2,1.7825581E-1,1.4917682E-1,-6.9402163E-3,-2.2060652E-1,1.2283944E-2,3.2612853E-2,-2.5518024E-1,3.6911674E-2,-1.5826033E-1],"split_indices":[2,10,8,0,0,3,4,0,0,5,8,2,3,1,8,10,5,0,0,2,4,4,4,4,4,3,2,0,4,2,5,1,9,8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[5.331069E2,6.2889214E0,5.2681793E2,4.629278E0,1.6596433E0,3.0668298E2,2.2013496E2,3.439841E0,1.1894373E0,1.0869688E2,1.979861E2,1.2583911E2,9.429585E1,1.0206434E2,6.632541E0,1.9033665E2,7.6494493E0,1.2389701E2,1.9421006E0,5.9949757E1,3.43461E1,3.735216E1,6.471218E1,2.3288138E0,4.3037276E0,1.2954497E2,6.079169E1,1.6055682E0,6.0438814E0,7.073817E1,5.3158844E1,4.4658203E1,1.5291556E1,1.1034446E1,2.3311653E1,2.2539326E1,1.4812834E1,6.6562333E0,5.8055943E1,1.2815876E0,1.0472262E0,1.1434307E0,3.160297E0,4.772789E0,1.2477217E2,4.7057224E1,1.3734468E1,5.007597E0,1.0362843E0,5.0567818E1,2.017035E1,1.271408E1,4.0444763E1,1.9353945E1,2.5304258E1,8.676135E0,6.6154213E0,1.865042E0,9.169404E0,1.571996E1,7.5916924E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"61","size_leaf_vector":"1"}},{"base_weights":[-9.564772E-3,4.4749302E-1,-1.8167242E-2,-1.4792797E-1,6.2185305E-1,-6.868118E-2,6.2577926E-2,3.1414646E-1,-7.030281E-2,-1.4576802E-1,9.2277944E-2,4.0554103E-1,-6.0393238E-3,-2.182744E-1,1.601505E-1,-6.0832703E-1,-9.509116E-2,1.2723283E-1,-2.8585732E-1,1.2423371E0,2.1317872E-1,-1.5088364E-1,1.1971153E-1,-7.1805525E-1,1.15242526E-1,1.0948728E0,-1.2516409E-1,4.7117332E-1,5.2528895E
-2,9.7284764E-2,4.2921686E-1,-4.4704068E-1,4.0865377E-1,-4.905473E-1,-6.056603E-3,1.9902062E-1,-3.2700917E-1,-2.978305E-2,-2.659143E-1,-9.7637914E-2,4.6728244E-1,-2.5844462E-2,-2.1394153E-1,3.577102E-2,2.8593516E-1,3.5083015E-4,2.151277E-1,-2.8216606E-1,4.914627E-2,1.5652715E-1,-2.710592E-1,1.9436546E-1,-1.9882281E-1,2.2919212E-1,-2.838845E-2,1.637119E-1,2.2230456E-2,-1.8380336E-1,9.28085E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":16,"left_children":[1,3,5,-1,7,9,11,-1,13,15,17,19,21,-1,-1,23,25,27,-1,29,31,33,35,37,-1,39,41,43,45,-1,-1,47,49,51,53,55,57,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.0764675E0,1.906804E0,2.1142101E0,0E0,2.6268828E0,3.967753E0,4.6996503E0,0E0,2.0713432E0,5.0025682E0,3.9138894E0,5.093967E0,3.0654588E0,0E0,0E0,2.5769892E0,7.1420183E0,2.579E0,0E0,5.763931E-1,3.831852E0,3.8435683E0,3.257741E0,1.9300756E0,0E0,3.7455578E0,4.3389874E0,2.9321969E0,2.8792045E0,0E0,0E0,2.2453327E0,3.7867882E0,5.009029E0,3.880192E0,3.3007255E0,2.7282174E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,5,5,6,6,8,8,9,9,10,10,11,11,12,12,15,15,16,16,17,17,19,19,20,20,21,21,22,22,23,23,25,25,26,26,27,27,28,28,31,31,32,32,33,33,34,34,35,35,36,36],"right_children":[2,4,6,-1,8,10,12,-1,14,16,18,20,22,-1,-1,24,26,28,-1,30,32,34,36,38,-1,40,42,44,46,-1,-1,48,50,52,54,56,58,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.2736068E0,-9.3749535E-1,-2.1928513E-1,-1.4792797E-1,-3.435213E-1,6.6354454E-1,-9.52706E-1,3.1414646E-1,-6.805919E-1,-1.0437956E0,-3.03364E-1,1.4323081E0,-1.6608919E-1,-2.182744E-1,1.601505E-1,-3.874429E-1,-9.874513E-1,8.65165E-1,-2.8585732E-1,-3.670469E-1,-3.9281076E-1,-7.4779874E-1,1.1254588E0,-1.8179249E0,1.15242526E-1,-1.4996567E0,-1.6896568E-1,-1.1411514E-1,1.0127703E0,9.7284764E-2,4.2921686E-1,-9.494192E-1,8.7150747E-1,-5.1127385E-2,-7.431684E-1,-8.8221234E-1,1.8406473E0,-2.978305E-2,-2.659143E-1,-9.7637914E-2,4.6728244E-1,-2.5844462E-2,-2.1394153E-1,3.577102E-2,2.8593516E-1,3.5083015E-4,2.151277E-1,-2.8216606E-1,4.914627E-2,1.5652715E-1,-2.710592E-1,1.9436546E-1,-1.9882281E-1,2.2919212E-1,-2.838845E-2,1.637119E-1,2.2230456E-2,-1.8380336E-1,9.28085E-2],"split_indices":[1,0,3,0,4,9,7,0,9,5,3,10,0,0,0,3,5,9,0,6,1,9,5,6,0,7,7,5,5,0,0,9,6,3,4,9,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[5.2505023E2,8.75711E0,5.162931E2,1.0836009E0,7.673509E0,3.1769406E2,1.9859904E2,4.435305E0,3.2382038E0,2.1487845E2,1.02815605E2,3.2291134E1,1.663079E2,1.4530318E0,1.7851721E0,2.0130539E1,1.9474791E2,1.0033803E2,2.4775753E0,5.0153813E0,2.7275753E1,7.719365E1,8.9114265E1,1.8375431E1,1.7551074E0,3.9276204E0,1.908203E2,1.6957535E1,8.338049E1,1.3172821E0,3.6980991E0,5.936889E0,2.1338863E1,2.2361643E1,5.4832005E1,7.614946E1,1.2964801E1,4.2696514E0,1.4105781E1,1.0380528E0,2.8895674E0,1.8002124E2,1.0799054E1,1.0521605E1,6.4359303E0,7.832877E1,5.051723E0,2.9853334E0,2.951556E0,2.0200096E1,1.138766E0,2.5750124E0,1.978663E1,4.8697243E0,4.996228E1,1.9278296E1,5.6871166E1,8.973709E0,3.991092E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"59","size_leaf_vector":"1"}},{"base_weights":[-7.238031E-3,-2.1
483509E-1,-1.6097557E-3,5.475932E-3,-5.025347E-1,-2.3262829E-2,1.6095136E-1,2.29849E-2,-6.928397E-1,-6.5512313E-3,-4.93041E-1,-7.22809E-1,2.4387793E-1,-2.4264471E-1,-5.34346E-2,1.4615173E-2,-4.0034652E-1,2.4204825E-1,-7.248226E-1,5.4440748E-2,-9.129457E-1,3.02467E-1,-6.5650946E-1,2.1223351E-3,3.1610897E-1,-2.0959254E-1,1.2607415E-1,-6.295138E-2,2.245171E-1,-2.6796195E-1,4.762836E-2,-3.3497876E-1,-1.0277882E-1,2.947634E-1,6.492449E-2,-2.409104E-1,-1.9684378E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":17,"left_children":[1,-1,3,5,7,9,11,-1,13,15,17,19,21,-1,-1,23,25,27,29,-1,31,33,35,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.0644343E0,0E0,1.8294332E0,2.2756147E0,8.454218E-1,3.3726468E0,5.998889E0,0E0,1.8489456E-1,3.4753852E0,2.6898909E0,1.3808799E0,4.0696526E0,0E0,0E0,3.0952334E0,5.499397E0,1.1380516E0,1.8483891E0,0E0,3.1502914E-1,3.8594894E0,3.6278272E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,2,2,3,3,4,4,5,5,6,6,8,8,9,9,10,10,11,11,12,12,15,15,16,16,17,17,18,18,20,20,21,21,22,22],"right_children":[2,-1,4,6,8,10,12,-1,14,16,18,20,22,-1,-1,24,26,28,30,-1,32,34,36,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.9810368E0,-2.1483509E-1,2.1349235E0,7.055825E-1,-8.4839493E-1,1.1722991E0,-3.8474053E-1,2.29849E-2,1.4023165E0,4.3232614E-1,-8.4839493E-1,-7.146222E-1,1.3858054E0,-2.4264471E-1,-5.34346E-2,1.1192183E0,6.414902E-1,-5.037827E-2,6.004839E-1,5.4440748E-2,7.8226164E-2,7.8966135E-1,1.3226638E0,2.1223351E-3,3.1610897E-1,-2.0959254E-1,1.2607415E-1,-6.295138E-2,2.245171E-1,-2.6796195E-1,4.762836E-2,-3.3497876E-1,-1.0277882E-1,2.947634E-1,6.492449E-2,-2.409104E-1,-1.9684378E-2],"split_indices":[8,0,3,3,1,6,6,0,6,3,1,4,2,0,0,6,6,0,3,0,2,3,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[5.167214E2,3.0759122E0,5.1364545E2,5.0745645E2,6.189009E0,4.2900723E2,7.844921E1,1.6784426E0,4.510566E0,4.1524936E2,1.3757877E1,6.067778E0,7.238143E1,3.21485E0,1.2957163E0,3.9498206E2,2.0267307E1,3.2874668E0,1.0470411E1,1.0620967E0,5.0056815E0,6.8582756E1,3.7986748E0,3.9313583E2,1.8462199E0,1.4973046E1,5.2942605E0,2.046576E0,1.2408907E0,8.785311E0,1.6851003E0,2.9828722E0,2.0228093E0,6.533089E0,6.2049664E1,2.7562008E0,1.0424739E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"37","size_leaf_vector":"1"}},{"base_weights":[-7.61159E-3,-2.4783937E-3,-5.920578E-1,7.267645E-2,-4.5554336E-2,-8.163554E-4,-2.548256E-1,1.041038E-2,7.837087E-1,-2.8486687E-1,1.10196145E-2,5.0025333E-2,-5.992228E-1,9.764499E-1,-2.6132092E-1,-3.4181662E-2,-6.680188E-1,-4.7625077E-1,4.5631513E-2,6.2410007E-3,6.683401E-1,9.463938E-2,-7.4759287E-1,-7.881214E-2,1.1650606E0,-1.902466E-1,7.7751234E-2,-6.121843E-1,1.874636E-1,-7.7678645E-1,1.05738275E-1,1.82205E-1,-6.091852E-1,6.1496384E-2,-2.6261264E-1,-3.989809E-2,5.3954214E-2,3.5304216E-1,-8.029859E-2,6.5422736E-2,-2.756702E-1,1.11910895E-1,4.4961944E-1,-4.1089214E-2,-4.3415067E-1,-1.9397764E-1,1.2664092E-1,-2.9888475E-1,3.183802E-3,-3.872081E-2,-3.2783762E-1,-2.3859741E-1,2.2441167E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":18,"left_children":[1,3,5,7,9,-1,-1,11,13,15,17,19,21,23,25,27,29,31,33,35,37,-1,39,-1
,41,-1,-1,43,45,47,-1,-1,49,51,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.5440189E0,1.6597667E0,6.773248E-1,8.262302E0,4.414946E0,0E0,0E0,4.202645E0,3.282136E0,5.965213E0,4.4829545E0,4.4129176E0,1.6770136E0,3.3208275E0,7.7155983E-1,5.07778E0,3.0045843E0,2.9096587E0,3.675904E0,3.731366E0,5.4455338E0,0E0,1.7610159E0,0E0,2.4481401E0,0E0,0E0,4.155607E0,5.9036245E0,3.907094E0,0E0,0E0,3.621183E0,2.848754E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,22,22,24,24,27,27,28,28,29,29,32,32,33,33],"right_children":[2,4,6,8,10,-1,-1,12,14,16,18,20,22,24,26,28,30,32,34,36,38,-1,40,-1,42,-1,-1,44,46,48,-1,-1,50,52,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.511101E0,-2.9627237E-1,-1.4983252E-1,-3.4935325E-1,-6.625513E-2,-8.163554E-4,-2.548256E-1,4.3232614E-1,2.1961388E-1,-4.862524E-1,-6.493857E-1,7.499093E-2,-8.2178444E-1,-8.708964E-1,-4.294823E-1,-8.0026084E-1,1.8752882E0,-6.0997343E-1,2.5557446E0,7.3075134E-1,-4.803529E-1,9.463938E-2,-4.5429462E-1,-7.881214E-2,-6.004373E-1,-1.902466E-1,7.7751234E-2,1.8515531E0,-9.395118E-1,-3.9281076E-1,1.05738275E-1,1.82205E-1,-8.747628E-1,-1.5542805E0,-2.6261264E-1,-3.989809E-2,5.3954214E-2,3.5304216E-1,-8.029859E-2,6.5422736E-2,-2.756702E-1,1.11910895E-1,4.4961944E-1,-4.1089214E-2,-4.3415067E-1,-1.9397764E-1,1.2664092E-1,-2.9888475E-1,3.183802E-3,-3.872081E-2,-3.2783762E-1,-2.3859741E-1,2.2441167E-2],"split_indices":[4,6,1,6,6,0,0,3,0,4,5,3,0,3,3,4,8,2,2,9,5,0,10,0,4,0,0,10,1,1,0,0,5,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[5.141488E2,5.1066785E2,3.4809592E0,1.8580524E2,3.248626E2,1.3652389E0,2.1157203E0,1.7177711E2,1.4028116E1,6.1342674E1,2.6351993E2,1.6215779E2,9.619335E0,1.1899114E1,2.129002E0,3.7739815E1,2.3602858E1,1.6630737E1,2.468892E2,1.5237766E2,9.780129E0,1.1853803E0,8.433954E0,1.5199976E0,1.0379116E1,1.1132154E0,1.0157868E0,9.97228E0,2.7767536E1,2.1545263E1,2.057596E0,1.4273181E0,1.5203419E1,2.4362584E2,3.2633648E0,8.468949E1,6.768816E1,6.1707206E0,3.6094086E0,1.2296065E0,7.204348E0,3.7043023E0,6.674814E0,7.097095E0,2.875185E0,5.711802E0,2.2055735E1,1.6641096E1,4.904167E0,8.264985E0,6.938434E0,2.8272083E0,2.4079863E2],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"53","size_leaf_vector":"1"}},{"base_weights":[-9.290476E-3,8.055568E-2,-4.6378404E-2,-4.0541153E-2,2.5003624E-1,-1.6670597E-1,2.864618E-3,1.9992508E-1,-2.2726372E-1,7.608106E-2,7.128221E-1,-6.3902736E-2,-6.495052E-1,1.7272044E-2,-5.740512E-1,-8.187408E-2,4.681202E-1,-5.4406166E-1,6.959733E-2,2.193873E-1,-5.3240526E-1,-1.4953089E-2,1.0906476E0,-1.4276563E-1,7.2059065E-1,-7.6669466E-1,1.6342081E-1,-5.778475E-3,4.6897566E-1,-3.602018E-1,3.5711256E-1,8.439148E-1,-2.4440768E-1,6.062531E-3,8.747654E-1,-8.166094E-1,1.7070045E-1,-5.964951E-1,2.3952948E-1,-6.3416086E-2,5.2627414E-1,1.544276E-1,-8.638336E-1,3.1772527E-1,-4.1452935E-1,1.2684757E0,3.8864285E-1,-7.784883E-3,-4.144061E-1,8.9488417E-1,-6.4140014E-2,-9.132729E-1,-2.4337716E-1,6.807587E-2,-1.1652361E-1,9.4253755E-1,1.8912205E-1,3.273587E-1,-1.923179E-1,3.6344108E-1,1.7468298E-2,-2.69476E-1,3.212509E-2,2.50584E-1,-1.3889788E-1,3.106357E-1,-9.961535E-2,-3.6842528E-1,-3.1233137E-2,1.6416621E-1,-2.6035497E-1,1.3396163E-1,-3.300817E-1,1.0856579E-1,-2.3180255E-1,1.0552139E-1,-8.002723
E-2,3.1513727E-1,3.1947829E-3,-6.728744E-2,-3.7513366E-1,5.1678758E-2,-1.9912975E-1,1.413424E-1,4.33017E-1,-1.52997235E-2,2.053489E-1,1.7883945E-1,-4.24347E-2,1.3322428E-1,-1.6329488E-1,1.1352363E-1,3.5647875E-1,-8.392044E-2,-3.2288432E-1,-2.7930528E-1,1.6729528E-1,-8.493955E-3,8.1036136E-2,-1.9010931E-1,-1.4122079E-2,3.3183992E-1,8.9118645E-2,-4.3200817E-2,3.316653E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":19,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,41,43,45,47,49,51,-1,53,55,-1,57,59,61,63,65,67,69,71,73,75,77,-1,79,-1,81,83,85,87,89,91,-1,93,95,97,99,101,103,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.7039701E0,3.0773695E0,2.1442077E0,3.9979112E0,4.970978E0,5.1740174E0,2.1519854E0,2.986476E0,4.7419014E0,4.1705465E0,4.6726637E0,5.5313125E0,2.9840922E0,2.6307395E0,4.109584E0,3.1965756E0,3.7780337E0,4.8752265E0,3.16604E0,3.3603835E0,3.621416E0,3.3459086E0,9.0025043E-1,2.9507103E0,1.5235415E0,1.0905447E0,0E0,1.980193E0,1.447469E0,0E0,3.1236327E0,8.5138965E-1,4.159533E0,4.4444575E0,2.419342E0,5.082569E0,3.422534E0,3.4944887E0,3.0034807E0,1.8533169E0,4.98688E0,0E0,1.530962E0,0E0,9.4145703E-1,5.0025845E-1,5.158324E-1,4.511E0,3.2221751E0,6.2419033E-1,0E0,1.0421381E0,3.3328376E0,2.837238E0,3.448535E0,1.2605953E-1,2.8950148E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,27,27,28,28,30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,42,42,44,44,45,45,46,46,47,47,48,48,49,49,51,51,52,52,53,53,54,54,55,55,56,56],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,-1,54,56,-1,58,60,62,64,66,68,70,72,74,76,78,-1,80,-1,82,84,86,88,90,92,-1,94,96,98,100,102,104,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.0994488E0,-1.3762592E0,-7.0273864E-1,-2.9627237E-1,6.313125E-1,5.1763475E-1,2.6797493E0,1.6000061E0,2.221304E-2,5.6201607E-1,-1.0146359E0,6.1836034E-1,1.9996859E0,1.9404742E0,3.2174037E0,-1.1521177E0,-3.6766437E-1,2.1586609E-1,-7.3706096E-1,-1.1411514E-1,-1.312893E0,-1.3329034E0,2.1586609E-1,5.599659E-1,-5.750444E-1,1.0972867E0,1.6342081E-1,2.24572E-1,-6.712091E-1,-3.602018E-1,-1.4983252E-1,-6.855323E-1,-8.708964E-1,-7.4779874E-1,-2.8642884E-1,-2.7132162E-1,-1.4229501E0,-1.2968501E0,-2.5788262E-1,-5.793705E-1,7.8226164E-2,1.544276E-1,8.872895E-1,3.1772527E-1,-8.8525325E-1,-1.2823372E-1,4.2232597E-1,-1.5171331E0,-1.1724524E0,-8.523691E-3,-6.4140014E-2,-3.9751104E-1,-8.894858E-2,2.1586609E-1,3.3462915E-1,4.6804446E-1,9.696049E-1,3.273587E-1,-1.923179E-1,3.6344108E-1,1.7468298E-2,-2.69476E-1,3.212509E-2,2.50584E-1,-1.3889788E-1,3.106357E-1,-9.961535E-2,-3.6842528E-1,-3.1233137E-2,1.6416621E-1,-2.6035497E-1,1.3396163E-1,-3.300817E-1,1.0856579E-1,-2.3180255E-1,1.0552139E-1,-8.002723E-2,3.1513727E-1,3.1947829E-3,-
6.728744E-2,-3.7513366E-1,5.1678758E-2,-1.9912975E-1,1.413424E-1,4.33017E-1,-1.52997235E-2,2.053489E-1,1.7883945E-1,-4.24347E-2,1.3322428E-1,-1.6329488E-1,1.1352363E-1,3.5647875E-1,-8.392044E-2,-3.2288432E-1,-2.7930528E-1,1.6729528E-1,-8.493955E-3,8.1036136E-2,-1.9010931E-1,-1.4122079E-2,3.3183992E-1,8.9118645E-2,-4.3200817E-2,3.316653E-1],"split_indices":[7,7,3,6,8,6,9,10,6,5,0,5,8,9,9,1,5,2,8,5,7,7,2,2,1,5,0,7,3,0,1,6,3,9,4,1,7,8,4,6,2,0,10,0,2,2,2,6,8,9,0,0,0,2,7,10,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[5.0924255E2,1.4844936E2,3.6079318E2,8.702748E1,6.1421875E1,1.04081055E2,2.5671216E2,3.8008472E1,4.9019012E1,4.5487946E1,1.59339285E1,8.674307E1,1.7337982E1,2.5140385E2,5.308294E0,1.8872936E1,1.9135534E1,2.3310398E1,2.5708614E1,3.733462E1,8.153322E0,5.7734385E0,1.0160491E1,7.956286E1,7.1802125E0,1.6114676E1,1.2233064E0,2.4013683E2,1.126702E1,3.000017E0,2.3082771E0,2.1924615E0,1.6680475E1,9.432548E0,9.702986E0,1.6772387E1,6.538012E0,4.71536E0,2.0993254E1,1.9842579E1,1.7492043E1,1.8271621E0,6.32616E0,1.1179465E0,4.655492E0,7.463056E0,2.6974347E0,5.38386E1,2.5724257E1,6.0870624E0,1.0931501E0,1.2006549E1,4.1081266E0,1.4429431E2,9.584252E1,3.305523E0,7.9614973E0,1.2763908E0,1.0318862E0,1.1244864E0,1.0679749E0,5.2880583E0,1.1392417E1,3.1258173E0,6.3067307E0,8.68816E0,1.0148258E0,1.0173478E1,6.5989084E0,5.145598E0,1.3924142E0,1.5727385E0,3.1426215E0,1.9302431E1,1.6908234E0,6.2836685E0,1.3558911E1,8.1596775E0,9.332366E0,2.978765E0,3.347395E0,1.4799644E0,3.1755273E0,2.0071516E0,5.4559045E0,1.4179554E0,1.2794793E0,9.129609E0,4.4708992E1,3.0632136E0,2.2661043E1,3.0346372E0,3.0524254E0,3.012892E0,8.993657E0,2.1231966E0,1.9849303E0,9.827181E1,4.6022495E1,1.0384874E1,8.545765E1,2.0677745E0,1.2377485E0,6.457164E0,1.5043335E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"105","size_leaf_vector":"1"}},{"base_weights":[-6.167105E-3,-1.9682686E-1,-1.4212751E-3,-4.8902687E-1,5.008818E-3,-8.2087226E-2,-2.71791E-1,1.0664369E-2,-5.342322E-1,1.3580035E-1,-1.9314232E-1,-1.4458714E-2,1.8976058E-1,-7.662205E-1,4.9380224E-2,2.4270011E-1,-2.0136843E-2,2.8342128E-1,-5.8976483E-1,-2.905942E-1,-2.6256561E-2,-1.9951009E-3,-1.405039E-1,-1.704192E-2,1.5679944E-1,2.6843457E-2,-2.4346016E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":20,"left_children":[1,-1,3,5,7,9,-1,11,13,-1,-1,15,17,19,-1,-1,21,23,25,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.5421131E0,0E0,1.5627444E0,1.0263237E0,1.5066373E0,1.6078984E0,0E0,2.1986158E0,9.574374E-1,0E0,0E0,2.021808E0,4.555724E0,4.6842337E-1,0E0,0E0,2.5777714E0,4.452531E0,1.0927165E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,2,2,3,3,4,4,5,5,7,7,8,8,11,11,12,12,13,13,16,16,17,17,18,18],"right_children":[2,-1,4,6,8,10,-1,12,14,-1,-1,16,18,20,-1,-1,22,24,26,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.9810368E0,-1.9682686E-1,-2.1240125E0,-6.493857E-1,2.8082702E0,-5.547491E-3,-2.71791E-1,1.1289028E0,8.467204E-2,1.3580035E-1,-1.9314232E-1,-1.9863726E0,5.414116E-1,2.8029475E-1,4.9380224E-2,2.4270011E-1,1.2472923E0,-6.054167E-1,-1.2981731E0,-2.905942E-1,-2.6256561E-2,-1.9951009E-3,-1.405039E-1,-1.704192E-2,1.5679944E-1,2.6843457E-2,-2.4346016E-1],"
split_indices":[8,0,2,5,8,7,0,8,4,0,0,2,4,5,0,0,1,1,2,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.992707E2,2.6243982E0,4.966463E2,5.4872255E0,4.9115906E2,3.382631E0,2.1045942E0,4.870313E2,4.127753E0,1.8323673E0,1.5502638E0,4.2792285E2,5.9108475E1,3.0265985E0,1.1011546E0,1.9616517E0,4.2596118E2,5.3336468E1,5.7720065E0,2.0022118E0,1.0243866E0,4.1450372E2,1.1457473E1,2.233561E1,3.1000858E1,1.5675074E0,4.2044992E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"27","size_leaf_vector":"1"}},{"base_weights":[-6.5721753E-3,-1.7704895E-1,8.789292E-3,8.63605E-2,-5.447327E-1,4.3384895E-1,-2.4009238E-3,3.8256142E-1,-4.7454682E-1,-8.724066E-1,4.721487E-2,4.1980527E-2,9.766605E-1,-3.2836607E-1,1.1849386E-2,-2.825454E-1,6.7889655E-1,1.6118723E-1,-7.555101E-1,-1.0365134E0,-1.3919207E-1,4.2846504E-1,-2.2032553E-1,6.689178E-1,-3.4130263E-1,9.7975E-3,3.8003176E-1,5.726297E-1,-4.8475102E-1,-1.1109678E-1,4.8937973E-2,1.24692954E-1,-6.7491746E-1,2.557626E-1,3.4027174E-1,-3.540873E-1,-2.7655234E-4,-3.3395305E-1,-6.533838E-2,6.400757E-2,-1.2028136E-1,-1.0816363E-1,8.765654E-1,1.943164E-2,2.845405E-1,3.2733563E-1,-2.4977072E-1,2.3714755E-2,2.4625738E-1,1.9430238E-1,-7.031761E-1,-1.8027376E-1,2.295994E-1,2.6375318E-1,5.461112E-4,-2.9562387E-1,4.158389E-2,2.1366991E-1,-1.3792573E-1,1.2452574E-1,-9.873594E-2,5.5422947E-2,3.316843E-1,1.5324066E-2,1.3233371E-1,2.163821E-1,-2.1798249E-1,-2.5924477E-1,1.109632E-1,-1.0024751E-1,1.9021593E-2,-6.2248465E-3,2.1377228E-1,-6.827027E-2,1.1994171E-1,4.9689636E-2,-1.3006483E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":21,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,-1,35,37,39,41,-1,43,45,-1,-1,47,49,51,53,-1,55,57,-1,-1,59,-1,-1,-1,-1,-1,61,-1,-1,63,-1,-1,-1,65,67,69,71,73,75,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.3062812E0,4.0402603E0,2.179473E0,4.315893E0,3.405686E0,2.4590652E0,2.0813498E0,3.510607E0,2.9776037E0,1.205368E0,2.4391627E0,2.1195726E0,1.295845E0,2.9220097E0,1.9637694E0,1.895758E0,2.01087E0,0E0,2.404337E0,3.2308388E-1,3.8143232E-1,2.233537E0,0E0,4.9995506E-1,2.0944808E0,0E0,0E0,3.313651E-1,2.588937E0,2.394371E0,3.4368753E0,0E0,1.1863537E0,2.770931E0,0E0,0E0,6.4178854E-1,0E0,0E0,0E0,0E0,0E0,3.9004445E-1,0E0,0E0,7.3868364E-2,0E0,0E0,0E0,2.8570838E0,2.5178685E0,3.154057E0,2.1053445E0,4.154259E0,1.9701216E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,18,18,19,19,20,20,21,21,23,23,24,24,27,27,28,28,29,29,30,30,32,32,33,33,36,36,42,42,45,45,49,49,50,50,51,51,52,52,53,53,54,54],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,-1,36,38,40,42,-1,44,46,-1,-1,48,50,52,54,-1,56,58,-1,-1,60,-1,-1,-1,-1,-1,62,-1,-1,64,-1,-1,-1,66,68,70,72,74,76,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.1510396E0,-1.2854533E0,-9.42992E-1,3.929833E-1,1.8145575E-1,-1.3702966E-1,-8.2880706E-1,-5.3458095E-1,-8.2178444E-1,1.683855E0,-3.222801E-2,-7.8681755E-1,-4.5355532E-1,-8.4659064E-1,-6.289329E-1,-9.858561E-1,-1.31742265E-2,1.6118723E-1,3.4824726E-1,-3.222801E-2,-5.750444E-1,-6.126634E-1,-2.2032553E-1,5.069149E-1,-1.0571768E0,9.7975E-3,3
.8003176E-1,-3.638682E-1,-8.8221234E-1,1.8515531E0,-1.1261294E0,1.24692954E-1,1.9592093E-1,-1.1411514E-1,3.4027174E-1,-3.540873E-1,5.5844444E-1,-3.3395305E-1,-6.533838E-2,6.400757E-2,-1.2028136E-1,-1.0816363E-1,8.872895E-1,1.943164E-2,2.845405E-1,-5.504646E-1,-2.4977072E-1,2.3714755E-2,2.4625738E-1,-1.0194073E0,1.9354022E0,-3.4540343E-1,8.3089806E-2,-5.3669715E-1,-8.4839493E-1,-2.9562387E-1,4.158389E-2,2.1366991E-1,-1.3792573E-1,1.2452574E-1,-9.873594E-2,5.5422947E-2,3.316843E-1,1.5324066E-2,1.3233371E-1,2.163821E-1,-2.1798249E-1,-2.5924477E-1,1.109632E-1,-1.0024751E-1,1.9021593E-2,-6.2248465E-3,2.1377228E-1,-6.827027E-2,1.1994171E-1,4.9689636E-2,-1.3006483E-2],"split_indices":[9,9,4,5,6,6,4,3,0,10,7,3,1,5,0,8,6,0,3,7,1,8,0,8,4,0,0,8,9,10,7,0,8,5,0,0,3,0,0,0,0,0,10,0,0,8,0,0,0,7,10,3,5,5,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.9721994E2,4.0230362E1,4.5698956E2,2.3884542E1,1.6345818E1,1.0753363E1,4.462362E2,1.583856E1,8.045982E0,1.0216603E1,6.129214E0,6.870631E0,3.882731E0,1.7767824E1,4.2846838E2,4.8960953E0,1.0942465E1,1.5503534E0,6.4956293E0,8.010133E0,2.2064712E0,4.4234576E0,1.7057561E0,2.3240101E0,4.5466213E0,1.1742815E0,2.7084496E0,2.2341764E0,1.5533648E1,9.8836525E1,3.2963187E2,1.7402241E0,3.1558712E0,6.481306E0,4.4611588E0,3.797123E0,2.6985064E0,6.9954357E0,1.0146968E0,1.0189246E0,1.1875466E0,1.6730174E0,2.7504404E0,1.1248218E0,1.1991885E0,2.0670085E0,2.4796126E0,1.1888008E0,1.0453755E0,3.8074014E0,1.1726246E1,8.254663E1,1.6289892E1,5.978615E1,2.698457E2,2.131192E0,1.0246793E0,3.9597366E0,2.5215695E0,1.0765077E0,1.6219987E0,1.1334673E0,1.616973E0,1.0256448E0,1.0413638E0,2.5595415E0,1.2478601E0,1.0365897E1,1.3603485E0,5.0367798E1,3.2178833E1,1.1359013E1,4.930879E0,1.2819387E1,4.6966763E1,5.61029E1,2.1374281E2],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"77","size_leaf_vector":"1"}},{"base_weights":[-4.085098E-3,-2.0481806E-1,-2.5391232E-4,7.531402E-1,-8.833328E-3,3.331144E-1,-5.9742056E-2,-2.9475057E-1,4.5880163E-3,-7.7529705E-1,-3.793727E-2,2.1283315E-1,-1.5020735E-2,-2.8052703E-1,-4.376518E-4,4.1844606E-1,-3.2091063E-1,9.9601954E-2,8.130702E-1,6.1756917E-3,-3.0417937E-1,2.420258E-1,-5.3957086E-2,5.311343E-2,-2.1626587E-1,8.20996E-2,-2.3073307E-1,3.9411983E-1,8.647752E-2,7.7974265E-3,-1.1004796E-1,2.0354457E-1,-1.19495235E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":22,"left_children":[1,-1,3,5,7,-1,-1,9,11,13,15,17,19,-1,-1,21,23,25,27,29,31,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.2785975E0,0E0,3.174334E0,2.1022444E0,1.8645772E0,0E0,0E0,2.6591537E0,1.9010056E0,9.305134E-1,2.1036499E0,2.6418762E0,2.6110275E0,0E0,0E0,1.6018726E0,2.1151338E0,5.4792495E0,1.25524E0,2.9483392E0,2.9615996E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,2,2,3,3,4,4,7,7,8,8,9,9,10,10,11,11,12,12,15,15,16,16,17,17,18,18,19,19,20,20],"right_children":[2,-1,4,6,8,-1,-1,10,12,14,16,18,20,-1,-1,22,24,26,28,30,32,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.8886994E0,-2.0481806E-1,-1.8533121E0,3.825173E-1,-1.3255169E0,3.331144E-1,-5.9742056E-2,5.963377E-1,-1.2694222E0,1.3805008E-1,1.1339923E0,3.382379E0,2.5594938E-1,-2.8052703E-1,-4.376518E-4,-1.7471503E0,1.5389027E-1,9.015902E-1,2.1463482E-1,8.367388E-1,-1.0002563E0,2
.420258E-1,-5.3957086E-2,5.311343E-2,-2.1626587E-1,8.20996E-2,-2.3073307E-1,3.9411983E-1,8.647752E-2,7.7974265E-3,-1.1004796E-1,2.0354457E-1,-1.19495235E-1],"split_indices":[6,0,6,8,5,0,0,9,6,3,9,0,4,0,0,6,1,7,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.9083453E2,1.7606304E0,4.8907388E2,4.5295634E0,4.845443E2,3.1731544E0,1.3564091E0,2.0785517E1,4.6375882E2,6.5361757E0,1.4249342E1,3.9062344E1,4.2469647E2,5.2447805E0,1.2913954E0,5.270408E0,8.978933E0,3.3843838E1,5.2185044E0,3.9660263E2,2.8093836E1,2.9848194E0,2.2855885E0,4.24786E0,4.7310734E0,2.8765156E1,5.078683E0,1.9013753E0,3.3171291E0,3.7747937E2,1.912326E1,1.9134066E0,2.618043E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"33","size_leaf_vector":"1"}},{"base_weights":[-3.6141595E-3,-1.098876E-1,1.9548884E-2,-4.290995E-1,3.161907E-2,1.1858055E-1,-3.4746874E-2,9.273062E-2,-6.611575E-1,-2.282779E-1,2.5397414E-1,7.3615104E-2,7.4657893E-1,2.2740585E-1,-1.0025896E-1,-2.8184992E-1,6.031098E-1,-7.302393E-1,3.569668E-2,1.5987687E-1,-6.2956434E-1,-6.6488755E-1,4.055997E-1,1.0833443E-1,-4.4214928E-1,-4.1028332E-2,9.391772E-1,-2.9176316E-1,5.4588675E-1,1.2613466E-1,-1.687369E-1,-5.115349E-1,8.837608E-2,1.1538172E-2,2.559563E-1,-2.7927735E-1,-9.698972E-1,-4.8632765E-1,3.6697808E-1,5.748827E-4,-9.8497015E-1,-2.4178119E-1,-1.26103135E-2,5.710999E-1,-2.1967514E-1,2.21971E-1,-5.3998258E-2,-2.7904722E-1,-5.7571925E-2,1.1911415E0,2.3168735E-1,6.549026E-1,-5.771023E-1,1.0649644E0,1.2410218E-1,-3.7504932E-1,3.0977654E-1,-2.6891947E-1,1.0783852E-1,-7.513528E-3,-1.9902869E-1,3.3042938E-2,-2.3669939E-1,-3.3262634E-1,-7.925679E-2,-2.0472902E-1,-7.829797E-3,2.0785902E-1,-4.323551E-2,1.4380099E-1,-1.7443591E-1,-6.465356E-2,-3.1950238E-1,4.8494484E-2,2.9243016E-1,-1.7076975E-1,8.479669E-2,-1.0748481E-2,1.2520096E-1,5.2432865E-2,-1.4658235E-1,-1.0629084E-1,1.9071953E-1,4.3824366E-1,4.9623083E-2,-1.2047302E-1,2.0918258E-1,-2.4295647E-2,2.7778015E-1,1.3030313E-1,-2.0957156E-1,3.7798005E-1,-1.0051367E-1,2.18024E-1,-9.453485E-2,-2.2349173E-1,4.3848804E-3,7.655035E-3,2.1515347E-1,1.9316825E-1,-9.1300786E-2,-1.0007489E-1,8.216074E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":23,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,-1,37,39,41,43,45,47,-1,49,51,53,55,57,59,-1,-1,-1,61,63,65,67,69,71,-1,-1,73,75,77,79,-1,81,83,85,87,89,91,93,95,97,99,101,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.2045543E0,3.9688315E0,2.1651325E0,3.326383E0,3.617793E0,3.9702804E0,4.494155E0,1.9243506E0,1.0993614E0,4.6066484E0,4.9608126E0,2.4497762E0,1.7302303E0,8.816772E0,3.261303E0,9.416282E-1,5.606339E-1,1.6094751E0,0E0,2.253169E0,3.2314205E0,4.1840053E-1,3.1805716E0,2.3450465E0,1.5710732E0,0E0,1.1246071E0,5.9314666E0,7.0491858E0,4.635885E0,4.4868536E0,3.2605052E-1,0E0,0E0,0E0,1.564089E0,7.8072166E-1,3.6057884E-1,2.1501799E0,2.0068672E0,3.3857822E-1,0E0,0E0,3.7467022E0,1.3726661E0,3.7717462E0,5.343417E0,0E0,1.4405124E0,1.2941241E0,1.254392E0,1.1181226E0,2.309636E0,4.399828E0,5.318566E0,1.9742663E0,4.2070675E0,4.0234127E0,3.2790582E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E
0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,19,19,20,20,21,21,22,22,23,23,24,24,26,26,27,27,28,28,29,29,30,30,31,31,35,35,36,36,37,37,38,38,39,39,40,40,43,43,44,44,45,45,46,46,48,48,49,49,50,50,51,51,52,52,53,53,54,54,55,55,56,56,57,57,58,58],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,-1,38,40,42,44,46,48,-1,50,52,54,56,58,60,-1,-1,-1,62,64,66,68,70,72,-1,-1,74,76,78,80,-1,82,84,86,88,90,92,94,96,98,100,102,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-3.3469358E-1,-8.603547E-1,-2.9627237E-1,-6.2121356E-1,-1.9011056E-1,-3.3165962E-1,-1.2823372E-1,-5.262896E-1,1.2452005E-1,-8.8525325E-1,-6.359675E-1,4.3232614E-1,-1.0610008E0,-6.805919E-1,-8.1500554E-1,-6.787934E-1,1.688692E0,-8.164333E-1,3.569668E-2,1.9309677E-1,-7.992597E-1,1.3372214E-1,7.47652E-1,5.599659E-1,-7.992597E-1,-4.1028332E-2,-4.1176462E-1,3.4824587E-2,-2.7735096E-1,2.5842196E-1,7.8739315E-1,-2.0551927E0,8.837608E-2,1.1538172E-2,2.559563E-1,1.3154992E0,5.6201607E-1,-8.747628E-1,1.551015E0,-1.2862124E0,-6.35789E-1,-2.4178119E-1,-1.26103135E-2,-2.7735096E-1,5.5844444E-1,7.1523346E-2,2.5811973E-1,-2.7904722E-1,6.533265E-2,-3.9281076E-1,1.4742326E0,2.522303E-1,-9.42992E-1,9.423064E-1,-8.5932416E-1,5.6201607E-1,-1.016626E0,-1.2508602E0,-3.445578E-1,-7.513528E-3,-1.9902869E-1,3.3042938E-2,-2.3669939E-1,-3.3262634E-1,-7.925679E-2,-2.0472902E-1,-7.829797E-3,2.0785902E-1,-4.323551E-2,1.4380099E-1,-1.7443591E-1,-6.465356E-2,-3.1950238E-1,4.8494484E-2,2.9243016E-1,-1.7076975E-1,8.479669E-2,-1.0748481E-2,1.2520096E-1,5.2432865E-2,-1.4658235E-1,-1.0629084E-1,1.9071953E-1,4.3824366E-1,4.9623083E-2,-1.2047302E-1,2.0918258E-1,-2.4295647E-2,2.7778015E-1,1.3030313E-1,-2.0957156E-1,3.7798005E-1,-1.0051367E-1,2.18024E-1,-9.453485E-2,-2.2349173E-1,4.3848804E-3,7.655035E-3,2.1515347E-1,1.9316825E-1,-9.1300786E-2,-1.0007489E-1,8.216074E-2],"split_indices":[2,0,6,5,6,6,2,6,7,2,10,3,1,9,9,2,8,2,0,9,8,8,6,2,8,0,9,10,9,10,5,2,0,0,0,8,5,5,1,7,1,0,0,9,3,8,8,0,0,1,10,6,4,8,7,5,9,10,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.8746204E2,8.656276E1,4.0089926E2,2.5962954E1,6.059981E1,1.4154575E2,2.5935352E2,8.176596E0,1.7786358E1,2.7928865E1,3.2670944E1,1.3313065E2,8.415095E0,5.1360096E1,2.0799342E2,4.9738913E0,3.2027042E0,1.6398266E1,1.3880914E0,1.4502517E1,1.3426349E1,4.148101E0,2.8522844E1,1.2547415E2,7.656496E0,1.55824E0,6.8568554E0,1.9559399E1,3.1800697E1,4.810673E1,1.5988669E2,3.6372118E0,1.3366793E0,1.3372129E0,1.8654912E0,6.4419074E0,9.956359E0,3.1925979E0,1.1309918E1,5.2018256E0,8.224524E0,3.1425114E0,1.0055894E0,2.2621847E1,5.9009967E0,7.3591225E1,5.1882923E1,2.7492263E0,4.90727E0,4.552072E0,2.3047833E0,4.2300954E0,1.5329303E1,1.3572513E1,1.8228186E1,1.262073E1,3.5486E1,1.1739204E2,4.249466E1,1.1425879E0,2.4946241E0,4.0962987E0,2.3456087E0,7.8421836E0,2.1141756E0,1.9001431E0,1.2924548E0,6.6891446E0,4.620774E0,2.9509137E0,2.2509117E0,1.1228101E0,7.101713E0,1.1925704E1,1.0696143E1,3.401093E0,2.4999037E0,3.2078228E1,4.1512997E1,3.4382267E1,1.7500656E1,3.7788875E0,1.1283824E0,3.
2685773E0,1.2834947E0,1.0347985E0,1.2699848E0,1.3273493E0,2.9027462E0,1.3674176E0,1.3961885E1,1.2001243E1,1.5712698E0,7.408486E0,1.0819699E1,6.0066166E0,6.614114E0,2.1527838E1,1.3958162E1,3.7429037E0,1.1364913E2,1.1338938E1,3.1155722E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"103","size_leaf_vector":"1"}},{"base_weights":[-4.7977744E-3,1.1658211E-3,-4.4999862E-1,-1.7354478E-1,5.040892E-3,-6.207363E-1,8.389852E-3,-5.2281967E-3,2.351755E-1,7.073462E-3,-2.4319081E-1,-2.8328857E-1,9.846136E-3,8.153999E-1,3.834247E-2,7.7478676E-3,-7.8773487E-1,-1.3205192E-2,2.1593371E-1,3.706574E-2,3.320279E-1,-2.6178735E-1,5.53782E-1,-1.1333562E-1,1.5115103E-1,-8.462955E-3,-2.6685315E-1,4.9220055E-4,-2.306021E-1,2.4353753E-1,2.0104844E-2,1.7764716E-1,-1.5554689E-1,2.343089E-1,-9.227637E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":24,"left_children":[1,3,5,-1,7,9,-1,11,13,-1,-1,15,17,19,21,23,25,27,29,-1,-1,31,33,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.2753314E0,1.0694113E0,5.3550994E-1,0E0,1.1182129E0,5.967922E-1,0E0,1.9033809E0,2.2769976E0,0E0,0E0,3.4343333E0,2.050738E0,8.853016E-1,2.6419501E0,3.2118678E0,6.360645E-1,4.360264E0,3.7987893E0,0E0,0E0,2.6920383E0,1.4657598E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,5,5,7,7,8,8,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,21,21,22,22],"right_children":[2,4,6,-1,8,10,-1,12,14,-1,-1,16,18,20,22,24,26,28,30,-1,-1,32,34,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.1349235E0,-1.9810368E0,1.4023165E0,-1.7354478E-1,1.7986078E0,-1.2968501E0,8.389852E-3,-8.3161515E-1,-3.720675E-1,7.073462E-3,-2.4319081E-1,-5.1429987E-1,6.414551E-1,-3.0166942E-1,-2.9512849E-2,1.1150343E0,1.4119537E-1,1.3256642E0,-7.3706096E-1,3.706574E-2,3.320279E-1,-2.9598737E-2,-1.0831622E0,-1.1333562E-1,1.5115103E-1,-8.462955E-3,-2.6685315E-1,4.9220055E-4,-2.306021E-1,2.4353753E-1,2.0104844E-2,1.7764716E-1,-1.5554689E-1,2.343089E-1,-9.227637E-2],"split_indices":[3,8,6,0,3,8,0,10,4,0,0,1,7,8,4,7,7,3,8,0,0,5,10,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.7915366E2,4.738043E2,5.3493657E0,2.1617258E0,4.7164258E2,3.7213314E0,1.6280345E0,4.524314E2,1.9211155E1,1.0464951E0,2.6748362E0,2.2351114E1,4.300803E2,4.0702567E0,1.5140899E1,1.4798102E1,7.5530114E0,3.8765607E2,4.242422E1,1.6281626E0,2.442094E0,9.880054E0,5.2608438E0,8.461082E0,6.33702E0,1.0434196E0,6.5095916E0,3.8116357E2,6.4924927E0,7.592673E0,3.4831547E1,1.9815274E0,7.898527E0,4.236449E0,1.024395E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"35","size_leaf_vector":"1"}},{"base_weights":[-4.5644334E-3,3.478304E-1,-1.0926952E-2,-9.845408E-2,5.270162E-1,-1.5270563E-2,5.0406337E-1,-6.72166E-2,6.8785924E-1,-5.6870885E-2,4.0970284E-2,2.6541027E-1,-6.535437E-2,2.9199205E-2,8.6849594E-1,1.0898285E-2,-2.0890011E-1,2.2898258E-1,-2.706118E-2,3.0302104E-1,7.508472E-2,-4.4667393E-2,2.6017725E-1,-4.5530987E-1,-2.8380439E-2,3.9171814E-3,4.3384755E-1,-5.509898E-1,3.085688E-2,-3.9890785E-2,4.39952E-2,1.9015981E-1,-1.7865483E-2,6.3218065E-2,-1.8343219E-1,-7.2289936E-2,8.4662735E-2,2.880994E-1,-3.700651E-2,2.011779E-1,-9.062663E-2,-2.0972474E-1,1.2289087E-1,6.138674E-2,-2.024522E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":25,"left_children":[1,3,5,-1,7,9,11,-1,13,15,17,-1,-1,-1,19,21,23,25,27,-1,-1,29,31,33,35,37,39,41,43,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.0723021E0,1.2004144E0,1.0569834E0,0E0,1.0104645E0,1.0928912E0,1.2788714E0,0E0,5.6446433E-1,2.7671113E0,2.5486002E0,0E0,0E0,0E0,1.4391732E-1,2.5958016E0,3.6697252E0,2.438294E0,4.4731493E0,0E0,0E0,2.6112099E0,4.1182885E0,3.7621975E0,3.2934315E0,3.3107514E0,5.027629E0,2.3734508E0,2.2896092E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,5,5,6,6,8,8,9,9,10,10,14,14,15,15,16,16,17,17,18,18,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28],"right_children":[2,4,6,-1,8,10,12,-1,14,16,18,-1,-1,-1,20,22,24,26,28,-1,-1,30,32,34,36,38,40,42,44,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.2736068E0,-6.004373E-1,2.6749263E0,-9.845408E-2,-1.016626E0,1.9592093E-1,-3.435213E-1,-6.72166E-2,-6.359675E-1,-2.3947062E-1,-5.461783E-1,2.6541027E-1,-6.535437E-2,2.9199205E-2,1.8145575E-1,-4.26067E-1,-3.445578E-1,-5.4467213E-1,-3.445578E-1,3.0302104E-1,7.508472E-2,6.004839E-1,-4.862524E-1,-2.6587364E-1,8.034405E-1,-6.6783863E-1,9.3145706E-2,5.7910216E-1,-6.7923063E-1,-3.9890785E-2,4.39952E-2,1.9015981E-1,-1.7865483E-2,6.3218065E-2,-1.8343219E-1,-7.2289936E-2,8.4662735E-2,2.880994E-1,-3.700651E-2,2.011779E-1,-9.062663E-2,-2.0972474E-1,1.2289087E-1,6.138674E-2,-2.024522E-2],"split_indices":[1,4,5,0,9,8,4,0,10,8,9,0,0,0,6,8,9,1,9,0,0,3,4,2,10,6,1,1,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.7555887E2,7.4821715E0,4.680767E2,1.3934861E0,6.088685E0,4.6512402E2,2.952673E0,1.0046011E0,5.084084E0,2.6735513E2,1.9776889E2,1.785661E0,1.1670121E0,1.55134E0,3.5327442E0,1.8556516E2,8.178997E1,5.1919075E1,1.4584981E2,2.3587296E0,1.1740147E0,1.524124E2,3.3152763E1,3.3939823E1,4.785015E1,2.5225437E1,2.669364E1,1.3670726E1,1.321791E2,1.0449037E2,4.792202E1,1.4833425E1,1.831934E1,6.3788137E0,2.7561008E1,2.8539902E1,1.9310247E1,2.1938431E0,2.3031593E1,2.0263733E1,6.429908E0,1.208061E1,1.5901148E0,4.7379734E1,8.4799355E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"45","size_leaf_vector":"1"}},{"base_weights":[-2.0930301E-3,-1.0892337E-1,2.0892378E-2,-3.6976293E-1,5.6845834E-3,2.9616473E-2,-4.2100668E-1,-5.5175793E-1,8.262226E-2,5.8914626E-1,-6.916965E-2,4.6285415E-1,1.9045759E-2,-5.991273E-1,4.2320207E-2,-6.38428E-1,1.0987253E-1,2.2403224E-1,-2.2344023E-1,-1.610708E-1,1.1981913E0,-1.10067114E-1,2.6137763E-1,-1.4328969E-1,6.69386E-1,-3.4359336E-1,3.1610727E-2,-2.409959E-1,5.9933458E-2,1.1169864E-1,-7.4062055E-1,8.7634295E-2,-2.3181675E-1,-1.8988675E-1,9.173345E-2,1.0710478E-1,4.1088092E-1,9.264912E-2,-3.705251E-1,8.747144E-1,-6.508506E-2,-2.692661E-1,-1.8256284E-2,1.2311286E-2,3.3852646E-1,2.8003024E-2,-2.4302882E-1,1.2217433E-1,-1.1712334E-1,-1.6312037E-1,8.345593E-2,2.9004149E-3,-2.3980074E-1,2.6203971E-3,3.372724E-1,-7.1743146E-2,2.1380262E-1,6.2156976E-3,-2.7836013E-1,1.362707E-1,-2.0196362E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":26,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,-1,29,-1,-1,31,33,35,37,-1,-1,39,41,43,-1,-1,-1,45,47,-1,-1,-1,-1,-1,49,51,53,-1,-1,55,57,59,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"
loss_changes":[1.1595283E0,2.5014753E0,1.5108784E0,2.1480768E0,2.6206207E0,1.740993E0,8.4445596E-1,1.6930747E0,1.832858E0,3.3099358E0,2.1438985E0,2.0790834E0,1.7151349E0,1.1318467E0,0E0,2.0444412E0,0E0,0E0,1.1578771E0,1.0611038E0,7.0994854E-2,2.775511E0,0E0,0E0,1.6171436E0,2.2562022E0,2.1425407E0,0E0,0E0,0E0,9.960203E-1,9.1858053E-1,0E0,0E0,0E0,0E0,0E0,3.6928248E0,3.7448063E0,1.4091444E0,0E0,0E0,1.6178617E0,2.7227566E0,2.8114495E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,15,15,18,18,19,19,20,20,21,21,24,24,25,25,26,26,30,30,31,31,37,37,38,38,39,39,42,42,43,43,44,44],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,-1,30,-1,-1,32,34,36,38,-1,-1,40,42,44,-1,-1,-1,46,48,-1,-1,-1,-1,-1,50,52,54,-1,-1,56,58,60,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-3.3469358E-1,-8.603547E-1,1.5690756E0,1.688692E0,-8.28857E-1,-1.8002312E0,1.8060604E0,2.5782444E0,-7.573051E-2,-2.8642884E-1,1.8259916E0,-1.8886994E0,-1.3255169E0,2.1684301E-1,4.2320207E-2,-1.0000844E0,1.0987253E-1,2.2403224E-1,-6.712091E-1,-4.7233352E-1,5.069149E-1,-2.5788262E-1,2.6137763E-1,-1.4328969E-1,6.648006E-1,7.664258E-1,1.4538637E0,-2.409959E-1,5.9933458E-2,1.1169864E-1,-1.2182465E0,-6.2121356E-1,-2.3181675E-1,-1.8988675E-1,9.173345E-2,1.0710478E-1,4.1088092E-1,-4.2400864E-1,-9.5407325E-1,2.603036E-1,-6.508506E-2,-2.692661E-1,-5.1127385E-2,1.376723E0,-3.8474053E-1,2.8003024E-2,-2.4302882E-1,1.2217433E-1,-1.1712334E-1,-1.6312037E-1,8.345593E-2,2.9004149E-3,-2.3980074E-1,2.6203971E-3,3.372724E-1,-7.1743146E-2,2.1380262E-1,6.2156976E-3,-2.7836013E-1,1.362707E-1,-2.0196362E-1],"split_indices":[2,0,4,8,3,6,9,10,9,4,4,6,5,6,0,4,0,0,3,2,8,4,0,0,7,2,0,0,0,0,9,5,0,0,0,0,0,5,2,9,0,0,3,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.702825E2,8.260706E1,3.8767545E2,2.4536776E1,5.8070286E1,3.8108496E2,6.590507E0,1.7340866E1,7.1959105E0,5.821696E0,5.2248592E1,8.057658E0,3.7302728E2,4.954507E0,1.6359999E0,1.612319E1,1.2176757E0,1.8157707E0,5.38014E0,2.938101E0,2.8835945E0,5.0917255E1,1.3313353E0,1.2145078E0,6.8431506E0,1.160981E1,3.6141748E2,3.9415584E0,1.0129486E0,1.2377336E0,1.4885456E1,3.9714491E0,1.4086906E0,1.2842216E0,1.6538794E0,1.0099965E0,1.8735981E0,2.8994736E1,2.192252E1,5.566686E0,1.2764645E0,3.644825E0,7.9649844E0,3.4101398E2,2.0403494E1,1.1182883E0,1.3767168E1,2.4688885E0,1.5025607E0,6.109356E0,2.2885382E1,1.213808E1,9.784439E0,1.476728E0,4.089958E0,6.633242E0,1.3317424E0,3.3896072E2,2.0532515E0,1.8803978E1,1.5995147E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"61","size_leaf_vector":"1"}},{"base_weights":[-3.224555E-3,-4.2976362E-1,1.8518226E-3,1.8124424E-2,-1.9630232E-1,-1.9326074E-2,1.1401545E-1,7.062765E-4,-1.9354042E-1,2.0733301E-1,-3.9978552E-1,-8.644793E-3,6.62079E-1,-4.3684945E-1,1.2738332E-1,1.0192963E-1,5.3044444E-1,-4.7368977E-2,-7.542942E-1,-2.2753945E-1,7.4690594E-3,8.5385345E-2,3.3579805E-1,-9.291093E-1,-1.5953793E-1,2.4527916E-1,-2.1426018E-1,-4.7502923E-1,2.2778E-1,7.0601165E-1,-3.9158118E-1,-1.9895685E-1,2.3640798E-1,-2.7544388E-1,1.5594051E-3,3.508717E-2,-2.1916199E-1,-1.444522E-2,2.7002169E-2,1.918041E-1,-1.7137355E-1,-2.2995744E-2,-3.1517258E-1,5.7305653E-2,-1.6736917E-1,1.4801131E-1,-7.197005E-2,-2.2304325E-1,1.39202885E-2,-7.648421E-3,1.5686926E-1,2.6466832E-1,-1.0335983E-1,-2.0499387E-1,2.8979618E-2,3.1335437E-1,
-7.207334E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":27,"left_children":[1,3,5,-1,-1,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,-1,41,43,45,-1,47,49,51,53,-1,55,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.0112981E0,6.4423347E-1,1.1000911E0,0E0,0E0,1.3599508E0,3.6094823E0,2.1731188E0,3.1935637E0,2.0883064E0,1.4193757E0,1.2240801E0,1.1962073E0,3.0028248E0,1.9713985E0,3.642174E0,2.7427025E0,1.3639442E0,7.1898603E-1,4.2085204E0,1.4906403E0,1.5470517E0,0E0,7.5927544E-1,2.3370879E0,2.119307E0,0E0,1.2998021E0,3.0503213E0,2.7392516E0,5.0468355E-1,0E0,2.3563209E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,23,23,24,24,25,25,27,27,28,28,29,29,30,30,32,32],"right_children":[2,4,6,-1,-1,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,-1,42,44,46,-1,48,50,52,54,-1,56,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-2.1240125E0,-1.6586821E0,1.0045053E0,1.8124424E-2,-1.9630232E-1,9.4228196E-1,5.080525E-1,9.068947E-1,1.0127703E0,9.995786E-1,2.1586609E-1,-6.359675E-1,3.4824587E-2,-3.435213E-1,6.8245757E-1,-6.6783863E-1,2.0618846E0,-1.4640522E0,1.2684474E0,-6.9653356E-1,-3.874429E-1,-8.6145854E-1,3.3579805E-1,-8.1500554E-1,4.2115542E-1,-5.1429987E-1,-2.1426018E-1,-6.712091E-1,-4.8392758E-1,8.367388E-1,-4.5355532E-1,-1.9895685E-1,-6.2121356E-1,-2.7544388E-1,1.5594051E-3,3.508717E-2,-2.1916199E-1,-1.444522E-2,2.7002169E-2,1.918041E-1,-1.7137355E-1,-2.2995744E-2,-3.1517258E-1,5.7305653E-2,-1.6736917E-1,1.4801131E-1,-7.197005E-2,-2.2304325E-1,1.39202885E-2,-7.648421E-3,1.5686926E-1,2.6466832E-1,-1.0335983E-1,-2.0499387E-1,2.8979618E-2,3.1335437E-1,-7.207334E-2],"split_indices":[2,6,8,0,0,6,7,6,5,9,2,10,10,4,0,6,8,6,7,1,3,8,0,9,5,1,0,3,1,0,1,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.656858E2,4.493142E0,4.6119263E2,1.6414294E0,2.851713E0,3.886402E2,7.255243E1,3.494536E2,3.9186604E1,6.1905502E1,1.064693E1,3.4555478E2,3.8988285E0,2.208306E1,1.7103542E1,4.767027E1,1.4235235E1,5.9077063E0,4.7392235E0,2.2794039E1,3.2276074E2,2.2490287E0,1.6497998E0,7.1105676E0,1.4972492E1,1.5623437E1,1.4801064E0,8.039369E0,3.96309E1,1.2155022E1,2.0802128E0,1.4418209E0,4.465885E0,3.7263994E0,1.0128238E0,1.3984065E1,8.809974E0,1.9307224E2,1.2968849E2,1.23416E0,1.0148687E0,1.0902411E0,6.020326E0,8.240939E0,6.7315526E0,1.0326298E1,5.297139E0,5.0259914E0,3.0133772E0,2.1819046E1,1.7811853E1,1.0546237E1,1.6087849E0,1.0518949E0,1.0283179E0,1.2148683E0,3.251017E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"57","size_leaf_vector":"1"}},{"base_weights":[-5.7785767E-3,-1.2040121E-3,-3.7747565E-1,3.4032807E-1,-6.3741044E-3,-5.465217E-1,9.8616155E-3,-1.9788127E-1,1.011037E0,-2.2041616E-1,7.3098233E-3,-6.724099E-3,-2.1636716E-1,9.947324E-2,-2.0524119E-1,3.6536872E-1,9.980906E-2,-5.8390673E-2,-7.335578E-1,5.9155977E-1,-2.167509E-3,-5.85284E-1,6.866059E-2,-2.7096188E-1,-2.5985417E-1,-1.1718996E-1,1.0834597E0,-3.9556322E-1,5.6966823E-3,-2.174313E-3,-2.2956845E-1,1.7255071E-1,-5.1500488E-2,4.085086E-2,-1.5779515E-1,1.993755E-1,
-1.9731049E-1,3.0238248E-2,4.4112203E-1,1.356216E-1,-1.8000421E-1,8.986976E-2,-6.785273E-4],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":28,"left_children":[1,3,5,7,9,11,-1,13,15,17,19,-1,-1,-1,-1,-1,-1,21,23,25,27,29,31,-1,33,35,37,39,41,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[7.840582E-1,8.087232E-1,4.06848E-1,2.6655443E0,1.3224043E0,3.5257506E-1,0E0,1.4177074E0,4.444766E-2,2.2127457E0,2.3535504E0,0E0,0E0,0E0,0E0,0E0,0E0,1.4931908E0,2.8154588E-1,2.4923706E0,1.2986304E0,4.4400263E-1,2.33651E0,0E0,3.981182E-1,1.992992E0,1.4117961E0,1.6969357E0,9.637841E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,7,7,8,8,9,9,10,10,17,17,18,18,19,19,20,20,21,21,22,22,24,24,25,25,26,26,27,27,28,28],"right_children":[2,4,6,8,10,12,-1,14,16,18,20,-1,-1,-1,-1,-1,-1,22,24,26,28,30,32,-1,34,36,38,40,42,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.1349235E0,-1.3347092E0,1.4023165E0,1.6019539E0,-8.3161515E-1,-1.2968501E0,9.8616155E-3,-7.572781E-1,6.8245757E-1,2.1961388E-1,-8.0366546E-1,-6.724099E-3,-2.1636716E-1,9.947324E-2,-2.0524119E-1,3.6536872E-1,9.980906E-2,1.8145575E-1,-9.66626E-4,6.948162E-1,-5.800681E-1,-3.1497508E-1,2.8029475E-1,-2.7096188E-1,-3.0169392E-1,-3.3238387E-1,1.9309677E-1,-8.6145854E-1,-1.2432345E0,-2.174313E-3,-2.2956845E-1,1.7255071E-1,-5.1500488E-2,4.085086E-2,-1.5779515E-1,1.993755E-1,-1.9731049E-1,3.0238248E-2,4.4112203E-1,1.356216E-1,-1.8000421E-1,8.986976E-2,-6.785273E-4],"split_indices":[3,10,6,7,10,8,0,1,0,0,10,0,0,0,0,0,0,6,4,7,10,4,5,0,1,7,9,8,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.6038248E2,4.5577637E2,4.6061096E0,5.8299255E0,4.4994644E2,3.0271852E0,1.5789243E0,3.6255643E0,2.2043612E0,2.6129221E1,4.2381723E2,1.0388677E0,1.9883175E0,1.8879622E0,1.7376021E0,1.0797024E0,1.1246588E0,2.0705278E1,5.423941E0,5.7847676E0,4.1803244E2,3.3219924E0,1.7383286E1,3.3260767E0,2.0978646E0,2.6820788E0,3.1026888E0,7.226708E0,4.1080576E2,1.0356008E0,2.2863917E0,5.145519E0,1.2237767E1,1.0394331E0,1.0584315E0,1.0025221E0,1.6795566E0,1.2326967E0,1.869992E0,1.1689968E0,6.057711E0,9.865782E0,4.0093997E2],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"43","size_leaf_vector":"1"}},{"base_weights":[-4.6364344E-3,-1.1080308E-3,-4.7789317E-1,-4.65852E-3,4.4722632E-1,-1.9648457E-1,-9.138318E-3,-8.74545E-2,2.1401163E-2,2.1963404E-1,-3.544484E-2,6.334306E-1,-1.30582E-1,2.7589008E-2,-2.8831586E-1,8.724553E-1,-2.8186196E-2,-3.1705415E-1,3.7137598E-3,-4.272892E-1,3.888159E-2,7.578039E-2,3.0218852E-1,1.2161457E-1,-1.2607868E-1,-7.2170414E-2,4.2412464E-2,1.3716039E-2,-2.367538E-1,2.0041408E-2,-2.6731526E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":29,"left_children":[1,3,5,7,9,-1,-1,11,13,-1,-1,15,17,19,-1,21,-1,23,25,27,29,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[7.655645E-1,7.285675E-1,2.4883229E-1,9.809934E-1,6.657315E-1,0E0,0E0,3.466339E0,2.1297016E0,0E0,0E0,1.1798127E0,2.5935402E0,1.787595E0,0E0,1.5091276E-1,0E0,3.4027286E0,2.0909436E0,1.4799776E0,1.2115047E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,7,7,8,8,11,11,12,12,13,13,15,15,17,17,18,18,19,19,20,20],"right_children":[2,4
,6,8,10,-1,-1,12,14,-1,-1,16,18,20,-1,22,-1,24,26,28,30,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.0923681E0,2.6749263E0,-2.7132162E-1,-6.289329E-1,-4.92467E-1,-1.9648457E-1,-9.138318E-3,-1.2854533E0,2.5782444E0,2.1963404E-1,-3.544484E-2,-5.4334486E-1,4.447161E-1,-9.1293585E-1,-2.8831586E-1,-1.7397814E0,-2.8186196E-2,-6.5014505E-1,-3.3469358E-1,-5.4334486E-1,6.768775E-1,7.578039E-2,3.0218852E-1,1.2161457E-1,-1.2607868E-1,-7.2170414E-2,4.2412464E-2,1.3716039E-2,-2.367538E-1,2.0041408E-2,-2.6731526E-2],"split_indices":[6,5,1,0,7,0,0,9,10,0,0,4,8,3,0,7,0,6,2,4,6,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.5778262E2,4.5538977E2,2.3928537E0,4.5279358E2,2.596194E0,1.3821306E0,1.0107232E0,1.07833275E2,3.449603E2,1.530218E0,1.0659759E0,5.314412E0,1.0251886E2,3.4376706E2,1.1932489E0,3.8498704E0,1.4645416E0,4.2351776E1,6.0167088E1,7.4350863E0,3.3633197E2,1.2012246E0,2.6486459E0,4.9281178E0,3.7423656E1,2.1416187E1,3.87509E1,3.6014569E0,3.8336294E0,2.7648804E2,5.984391E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"31","size_leaf_vector":"1"}},{"base_weights":[-4.076062E-3,-1.5097484E-1,-9.1224094E-4,-1.5659288E-1,2.030921E-3,5.216526E-1,-3.8436821E-3,7.421326E-1,-2.6000135E-2,3.666886E-4,-1.845288E-1,3.0629992E-1,5.4681707E-2,-2.0417242E-1,8.870365E-3,-1.7406398E-1,1.2473341E-2,-7.457097E-4,6.4258106E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":30,"left_children":[1,-1,3,-1,5,7,9,11,-1,13,-1,-1,-1,15,17,-1,-1,-1,-1],"loss_changes":[7.214162E-1,0E0,6.99159E-1,0E0,1.3868017E0,7.454896E-1,1.15724E0,4.1221452E-1,0E0,7.801537E-1,0E0,0E0,0E0,1.6786274E0,1.004526E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,2,2,4,4,5,5,6,6,7,7,9,9,13,13,14,14],"right_children":[2,-1,4,-1,6,8,10,12,-1,14,-1,-1,-1,16,18,-1,-1,-1,-1],"split_conditions":[-1.9810368E0,-1.5097484E-1,-1.8886994E0,-1.5659288E-1,-1.8533121E0,3.825173E-1,2.511101E0,3.4158805E-1,-2.6000135E-2,-1.3255169E0,-1.845288E-1,3.0629992E-1,5.4681707E-2,7.9795814E-1,7.4123514E-1,-1.7406398E-1,1.2473341E-2,-7.457097E-4,6.4258106E-2],"split_indices":[8,0,6,0,6,8,4,4,0,5,0,0,0,9,4,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.5608694E2,1.8770096E0,4.5420993E2,1.5606279E0,4.526493E2,4.0787296E0,4.485706E2,2.8322384E0,1.2464912E0,4.4649448E2,2.076101E0,1.3407437E0,1.4914948E0,1.6903532E1,4.2959094E2,6.142874E0,1.0760659E1,4.080124E2,2.1578539E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"19","size_leaf_vector":"1"}},{"base_weights":[-3.6546374E-3,5.68906E-2,-2.8712116E-2,2.058622E-1,-7.605771E-2,-3.5087633E-1,-1.4560879E-2,-2.0026019E-1,3.6326906E-1,-2.8515208E-1,8.4459536E-2,-3.041782E-1,-2.2090001E-2,2.3168984E-1,-3.283839E-2,7.873966E-1,-3.9609835E-1,-4.1699767E-1,4.432222E-1,1.464394E-1,-5.6227326E-1,3.281298E-1,-1.3077183E-1,-3.3256856E-1,6.34408E-1,-2.8483411E-2,3.0500263E-1,-4.775623E-1,-1.7026057E-2,-4.5866843E-2,3.880471E-1,-8.6047935E-1,-3.1303589E-3,-2.3793314E-1,1.1214384E-1,9.417432E-1,3.1621355E-1,-2.4460937E-1,3.6274767E-1,-1.0779274E0,-2.5679058E-1,-5.8625424E-1,4.992322E-1,8.209359E-2,-9.266802E-1,-6.7567825E-1,3.3622417E-1,1.4097219E-2,2.7516904E-1,-2.4759252E-1,3.4865323E-1,-7.826429E-1,2.4916384E-1,2.0956298E-2,-1.5970771E-1,-3.2476962E-1,3.1161053E-2,2.147461E-1,-1.0126684E-1,3.457308E-1,-6.375696E-2,4.8592337E-3,1.8597291E-1,-1.9800054E-1,1.7272902E-1,-3.9051667E-1,-3.13938E-2,1.9969871E-2,-
2.3435056E-1,-2.4764718E-1,-7.2865887E-3,1.7765686E-1,-9.755147E-2,8.2482055E-2,-1.4139557E-1,-4.2237592E-1,4.256582E-2,-3.0504274E-1,4.1627087E-2,-1.4575441E-1,2.8378668E-1,3.7908596E-1,5.5324943E-3,-3.15194E-1,5.0590325E-2,1.2759997E-1,-3.177244E-3,-3.402165E-2,-2.2733611E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":31,"left_children":[1,3,5,7,9,11,13,15,17,19,21,-1,23,25,27,29,31,33,35,37,39,41,43,45,47,49,-1,51,53,-1,-1,55,57,-1,-1,59,61,-1,63,65,67,69,71,73,75,77,79,-1,-1,-1,81,83,-1,85,87,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[6.9393057E-1,2.6678221E0,1.4693499E0,4.126931E0,2.4235504E0,2.9348338E0,1.4066999E0,3.862934E0,3.0582833E0,3.823048E0,2.204444E0,0E0,2.2677534E0,4.44084E0,2.0329301E0,1.8027179E0,2.914636E0,1.7230625E0,2.3782167E0,3.0051432E0,2.756009E0,3.3615592E0,3.9049196E0,1.949764E0,5.5230176E-1,5.455808E0,0E0,4.612306E0,1.5279262E0,0E0,0E0,1.680975E0,2.564889E0,0E0,0E0,2.208928E0,3.1960828E0,0E0,2.8470573E0,1.3557897E0,2.3246613E0,4.5746696E-1,1.5002127E0,2.1305823E0,2.693913E0,1.6118388E0,2.0701344E0,0E0,0E0,0E0,3.7158332E0,2.3609858E0,0E0,2.8500597E0,1.5551127E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,27,27,28,28,31,31,32,32,35,35,36,36,38,38,39,39,40,40,41,41,42,42,43,43,44,44,45,45,46,46,50,50,51,51,53,53,54,54],"right_children":[2,4,6,8,10,12,14,16,18,20,22,-1,24,26,28,30,32,34,36,38,40,42,44,46,48,50,-1,52,54,-1,-1,56,58,-1,-1,60,62,-1,64,66,68,70,72,74,76,78,80,-1,-1,-1,82,84,-1,86,88,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.0994488E0,1.3903836E0,-1.0427527E0,-7.3706096E-1,1.7677041E0,-4.0243417E-1,-9.660462E-1,-7.0572996E-1,-8.5735327E-1,9.406206E-3,-2.1057709E-1,-3.041782E-1,-4.294823E-1,3.240115E-2,-9.3936574E-1,-9.874513E-1,-8.0783725E-1,-6.35789E-1,5.9381795E-1,-1.7998126E0,-6.289329E-1,-3.3469358E-1,2.699239E-1,2.8468606E-1,-5.9413753E-2,-2.5497583E-1,3.0500263E-1,-3.435213E-1,2.1463482E-1,-4.5866843E-2,3.880471E-1,4.9114594E-1,-6.86076E-1,-2.3793314E-1,1.1214384E-1,2.522303E-1,-6.289329E-1,-2.4460937E-1,-1.1510396E0,-3.4540343E-1,1.683855E0,-5.461783E-1,1.4247327E-1,2.1586609E-1,1.9592093E-1,5.868313E-2,-6.289329E-1,1.4097219E-2,2.7516904E-1,-2.4759252E-1,-7.992597E-1,2.9675448E-1,2.4916384E-1,-1.6056012E0,2.343715E0,-3.2476962E-1,3.1161053E-2,2.147461E-1,-1.0126684E-1,3.457308E-1,-6.375696E-2,4.8592337E-3,1.8597291E-1,-1.9800054E-1,1.7272902E-1,-3.9051667E-1,-3.13938E-2,1.9969871E-2,-2.3435056E-1,-2.4764718E-1,-7.2865887E-3,1.7765686E-1,-9.755147E-2,8.2482055E-2,-1.4139557E-1,-4.2237592E-1,4.256582E-2,-3.0504274E-1,4.1627087E-2,-1.4575441E-1,2.8378668E-1,3.7908596E-1,5.5324943E-3,-3.15194E-1,5.0590325E-2,1.2759997E-1,-3.177244E-3,-3.402165E-2,-2.2733611E-1],"split_indices":[7,10,7,8,10,6,7,5,4,2,1,0,3,1,7,5,3,1,10,7,0,2,6,2,2,5,0,4,1,0,0,2,4,0,0,6,0,0,9,3,10,9,0,2,8,9,0,0,0,0,8,0,0,6,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.55326E2,1.3291037E2,3.2241562E2,6.2419483E1,7.049088E1,1.2565152E1,3.0985046E2,1.7359175E1,4.506031E1,3.0276016E1,4.0214867E1,3.47447E0,9.090682E0,2.0602276E1,2.892482E2,2.372659E0,1.4986516E1,3.796318E0,4.1263992E1,1.2022935E1,1.825308E1,1.8615324E1,2.1599543E1,6.506821E0,2.5838604E0,1.619753E1,4.4047456E0,8.92859E0,2.803196E2,1.0744009E0,1.2982581E0,6.323807E0,8.662709E0,2.57075E0,1.2255679E0,7.075843E0,3.418815E1,1.6989927E0,1.0323942E1,5.849884E0,1.2403196E1,2.5518281E0,1.6063496E1,1.7749342E1,3.8502002E0,4.2937236E0,2.2130978E0,1.2187287E0,1.3651319E0,4.821717E0,1.13758135E1,7.5657773E0,1.3628122E0,2.2205968E2,5.8259937E1,5.040429E0,1.2833774E0,2.388167E0,6.2745423E0,5.9849687E0,1.0908742E0,1.7728153E1,1.6459995E1,1.417887E0,8.906055E0,4.4818726E0,1.3680116E0,8.212209E0,4.190988E0,1.4609482E0,1.0908798E0,1.4688895E1,1.3746003E0,1.3535752E1,4.213589E0,2.4356863E0,1.4145138E0,2.8510582E0,1.4426655E0,1.028967E0,1.1841308E0,2.267159E0,9.108654E0,5.8212852E0,1.7444919E0,1.5166761E1,2.0689291E2,5.5177788E1,3.0821488E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"89","size_leaf_vector":"1"}},{"base_weights":[-5.9561185E-3,-3.5047118E-2,4.2216666E-2,-7.059933E-2,1.2733379E-1,5.7553476E-1,2.0660643E-2,-4.4004783E-2,-4.7003996E-1,-3.9124435E-1,2.2949153E-1,-2.401012E-2,7.712815E-1,-2.3389955E-1,4.0094238E-2,-8.095641E-3,-2.0907868E-1,-7.2390753E-1,2.8016452E-2,-6.6345555E-1,1.902052E-1,9.277132E-1,1.4138475E-1,3.5525602E-1,1.1071856E-1,7.397204E-2,-2.9155257E-1,7.984047E-3,-5.676544E-1,-6.020641E-1,2.6283825E-2,-9.504952E-1,5.1477112E-2,-5.084728E-1,1.9533156E-1,3.847909E-2,-8.941941E-1,3.4901303E-1,6.0348593E-2,3.6217973E-1,-1.167576E-1,-8.081469E-2,1.3180096E-1,2.451575E-1,-2.3957781E-2,-6.46985E-1,2.6841027E-1,-1.2439032E-2,6.61371E-2,3.5479914E-2,-2.3877265E-1,1.6826189E-1,-2.2214423E-1,-2.0675592E-1,6.364366E-2,-6.363237E-2,-3.101135E-1,1.3943322E-1,-1.1517301E-1,2.5973457E-4,-2.2165655E-1,-3.3361256E-1,-1.3849318E-1,-2.319838E-1,1.4659633E-1,-2.4146967E-1,2.2046648E-2,7.553009E-3,1.520813E-1,-3.1077754E-2,1.09571464E-1,-2.5376678E-1,1.05346136E-1,2.3168023E-1,-8.332685E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":32,"left_children":[1,3,5,7,9,11,13,15,17,19,21,-1,23,-1,25,27,29,31,33,35,-1,37,39,-1,41,43,45,47,49,51,53,55,57,59,-1,-1,61,-1,-1,63,65,-1,-1,67,69,71,73,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[6.319562E-1,1.6314768E0,1.9406836E0,2.4354439E0,2.7830029E0,9.0366983E-1,2.5864034E0,1.2898273E0,1.8321216E0,2.8588448E0,2.5203464E0,0E0,1.2725692E0,0E0,1.8343023E0,1.6240184E0,3.612717E0,1.7230763E0,2.2701485E0,1.465081E0,0E0,5.6208944E-1,2.2908978E0,0E0,5.034012E-1,2.4733007E0,3.1901083E0,1.8577225E0,8.8348234E-1,2.7667918E0,3.5078576E0,2.3699903E-1,7.3689264E-1,4.322828E-1,0E0,0E0,3.781843E-2,0E0,0E0,3.4665415E0,2.539212E0,0E0,0E0,3.0766923E0,2.967833E0,2.1958766E0,2.0284262E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,12,12,14,14,15,15,16,16,17,17,18,18,19,19,21,21,22,22,24,24,25,25,26,26,27,27,28,28,29,29,30,3
0,31,31,32,32,33,33,36,36,39,39,40,40,43,43,44,44,45,45,46,46],"right_children":[2,4,6,8,10,12,14,16,18,20,22,-1,24,-1,26,28,30,32,34,36,-1,38,40,-1,42,44,46,48,50,52,54,56,58,60,-1,-1,62,-1,-1,64,66,-1,-1,68,70,72,74,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.8775555E-1,5.056718E-1,-1.1632603E0,5.1763475E-1,-9.494192E-1,-5.1127385E-2,-7.2091955E-1,-5.5560064E-1,8.8920105E-1,5.069149E-1,-8.1500554E-1,-2.401012E-2,1.3805008E-1,-2.3389955E-1,9.323718E-1,1.9363651E0,-3.3206618E-1,-2.1057709E-1,1.4247327E-1,6.747046E-1,1.902052E-1,-4.5770618E-1,1.4704613E-1,3.5525602E-1,9.324553E-3,-1.4266019E-3,6.2878585E-1,-6.6069925E-1,-7.3706096E-1,-1.1217455E0,-1.5463476E0,-7.4779874E-1,-2.5497583E-1,5.691137E-1,1.9533156E-1,3.847909E-2,-5.9036255E-1,3.4901303E-1,6.0348593E-2,-5.411535E-1,-3.445578E-1,-8.081469E-2,1.3180096E-1,7.3356634E-1,6.9351125E-1,1.6043851E0,9.0406567E-1,-1.2439032E-2,6.61371E-2,3.5479914E-2,-2.3877265E-1,1.6826189E-1,-2.2214423E-1,-2.0675592E-1,6.364366E-2,-6.363237E-2,-3.101135E-1,1.3943322E-1,-1.1517301E-1,2.5973457E-4,-2.2165655E-1,-3.3361256E-1,-1.3849318E-1,-2.319838E-1,1.4659633E-1,-2.4146967E-1,2.2046648E-2,7.553009E-3,1.520813E-1,-3.1077754E-2,1.09571464E-1,-2.5376678E-1,1.05346136E-1,2.3168023E-1,-8.332685E-2],"split_indices":[3,5,6,6,9,3,6,3,6,8,9,0,3,0,9,2,1,1,0,5,0,4,2,0,8,5,2,3,8,1,7,9,5,8,0,0,0,0,0,2,9,0,0,10,8,5,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.4889243E2,2.8004742E2,1.6884499E2,2.3020976E2,4.9837666E1,5.5609875E0,1.63284E2,2.1688016E2,1.3329601E1,7.7363286E0,4.2101337E1,1.4145855E0,4.146402E0,2.9435027E0,1.603405E2,1.7899232E2,3.788783E1,8.528844E0,4.800757E0,6.3920927E0,1.3442358E0,3.6496284E0,3.845171E1,2.063718E0,2.0826838E0,1.4618466E2,1.415584E1,1.749506E2,4.041714E0,1.3608182E1,2.4279648E1,6.425351E0,2.103493E0,2.678486E0,2.122271E0,1.542731E0,4.849362E0,2.3017902E0,1.347838E0,2.0507856E1,1.7943853E1,1.0492777E0,1.033406E0,5.264885E1,9.353581E1,8.564303E0,5.591537E0,1.425749E2,3.2375706E1,1.1294631E0,2.912251E0,1.1227866E0,1.2485395E1,4.4482303E0,1.9831417E1,1.0102419E0,5.415109E0,1.0444877E0,1.0590053E0,1.1444755E0,1.5340105E0,2.180359E0,2.6690028E0,1.5428069E0,1.896505E1,3.1866357E0,1.4757218E1,2.920407E1,2.344478E1,7.825717E1,1.527864E1,7.268421E0,1.2958819E0,2.693091E0,2.8984463E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"75","size_leaf_vector":"1"}},{"base_weights":[-6.518015E-3,-2.1585852E-3,-4.0101185E-1,3.27563E-3,-3.792553E-1,6.745863E-2,-2.1596006E-1,-4.8046935E-4,1.912801E-1,-6.139449E-1,4.5025613E-2,-1.6724642E-1,1.1772171E-2,-2.2490357E-1,-3.0167094E-2,-5.302227E-1,1.8419454E-2,3.392642E-1,-5.4481574E-3,-2.2079699E-1,1.803997E-1,2.223423E-1,-4.453687E-2,2.4628907E-2,2.8813162E-1,-2.0535539E-1,1.3608326E-3],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":33,"left_children":[1,3,5,7,9,-1,-1,11,-1,13,-1,15,17,-1,-1,19,21,23,25,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[7.644756E-1,9.0529287E-1,1.1314833E0,1.0388551E0,8.717866E-1,0E0,0E0,8.894345E-1,0E0,2.4129033E-1,0E0,2.0175464E0,2.2888558E0,0E0,0E0,2.7978637E0,2.6037455E0,3.159913E0,2.6219382E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,7,7,9,9,11,11,12,12,15,15,16,16,1
7,17,18,18],"right_children":[2,4,6,8,10,-1,-1,12,-1,14,-1,16,18,-1,-1,20,22,24,26,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.9354604E0,1.9762269E0,-1.2691727E0,1.9458547E0,3.0088844E0,6.745863E-2,-2.1596006E-1,-1.2128284E0,1.912801E-1,1.201199E0,4.5025613E-2,3.7022063E-1,-1.4286649E0,-2.2490357E-1,-3.0167094E-2,2.8141627E0,5.9381795E-1,-5.976401E-1,-1.1001399E0,-2.2079699E-1,1.803997E-1,2.223423E-1,-4.453687E-2,2.4628907E-2,2.8813162E-1,-2.0535539E-1,1.3608326E-3],"split_indices":[7,1,5,1,1,0,0,5,0,9,0,10,6,0,0,9,10,3,5,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.440258E2,4.4016708E2,3.8586795E0,4.3489133E2,5.2757607E0,1.4018327E0,2.4568467E0,4.3332468E2,1.5666524E0,3.5444546E0,1.731306E0,2.8792418E1,4.0453226E2,2.4392803E0,1.1051744E0,9.115617E0,1.96768E1,1.9274431E1,3.8525784E2,8.008831E0,1.1067863E0,3.0455515E0,1.6631248E1,1.4431794E1,4.842637E0,4.6033883E0,3.8065445E2],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"27","size_leaf_vector":"1"}},{"base_weights":[-4.967784E-3,-5.8992975E-4,-3.0753878E-1,-3.728645E-3,1.8171577E-1,1.7733067E-1,-6.0684913E-1,-1.4786794E-2,1.7020823E-1,5.6060525E-3,-2.7779463E-1,-6.781165E-3,-5.9925836E-1,2.3502235E-1,6.44435E-2,3.032109E-3,-4.3192944E-1,1.5467012E-2,-7.8744966E-1,-2.5245857E-1,4.501013E-1,-5.183336E-2,5.197109E-3,8.350895E-2,-2.0571877E-1,-2.6779124E-1,-7.093106E-2,1.4784495E-2,-2.535222E-1,-1.8182546E-1,1.9480349E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":34,"left_children":[1,3,5,7,-1,-1,9,11,13,-1,-1,15,17,-1,19,21,23,-1,25,27,29,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[5.8621264E-1,8.346052E-1,2.0564535E0,8.4136E-1,0E0,0E0,1.0610307E0,1.9188973E0,1.6454185E0,0E0,0E0,1.6969739E0,7.193278E-1,0E0,2.9725163E0,1.0036458E0,1.8451546E0,0E0,4.1657686E-2,2.4523253E0,2.6896024E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,6,6,7,7,8,8,11,11,12,12,14,14,15,15,16,16,18,18,19,19,20,20],"right_children":[2,4,6,8,-1,-1,10,12,14,-1,-1,16,18,-1,20,22,24,-1,26,28,30,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[3.7680821E0,3.6909416E0,-2.1057709E-1,1.8732673E0,1.8171577E-1,1.7733067E-1,1.4028195E0,2.7652543E0,-1.0146359E0,5.6060525E-3,-2.7779463E-1,1.6716468E0,-7.447781E-1,2.3502235E-1,1.0681607E0,-1.0917766E0,-9.874513E-1,1.5467012E-2,1.0667855E0,2.1463482E-1,-1.6082309E-1,-5.183336E-2,5.197109E-3,8.350895E-2,-2.0571877E-1,-2.6779124E-1,-7.093106E-2,1.4784495E-2,-2.535222E-1,-1.8182546E-1,1.9480349E-1],"split_indices":[0,0,1,9,0,0,9,0,0,0,0,9,3,0,0,0,5,0,9,1,10,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.4169128E2,4.3637933E2,5.3119574E0,4.3512067E2,1.2586639E0,1.0836266E0,4.2283306E0,4.0997147E2,2.5149212E1,1.7464802E0,2.4818504E0,4.054298E2,4.5416546E0,2.7571003E0,2.239211E1,3.9725327E2,8.176534E0,1.1815578E0,3.3600967E0,1.2481344E1,9.910768E0,2.903129E1,3.68222E2,2.1270149E0,6.0495186E0,2.300869E0,1.0592276E0,8.877867E0,3.6034772E0,1.2488282E0,8.661939E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"31","size_leaf_vector":"1"}},{"base_weights":[-4.763583E-3,1.5129928E-2,-7.01232E-2,-4.6177073E-3,3.954851E-1,-3.0639258E-1,5.0793458E-2,-9.907449E-3,1.7887013E-1,6.8142956E-1,-1.9091399E-1,1.4999834E-1,-3.6363158E-1,3.8286117E-1,-6.4818114E-2,1.3130152E-2,-2.0043983E-1,-5.8046006E-2,1.119156E0,1.9337015E-1,-6.1681527
E-1,9.782599E-2,-4.8724756E-1,9.9279743E-1,2.1331218E-1,-2.68519E-1,9.041944E-3,4.151602E-2,-1.6458789E-1,-4.900296E-2,-7.329461E-1,-2.110065E-1,3.0949974E-1,4.4528523E-1,5.29551E-1,-2.571742E-1,1.8762978E-2,4.4920075E-1,-2.5266227E-1,-7.0848846E-1,-3.5971403E-2,3.4572273E-2,4.0580815E-1,4.7806528E-1,-3.6816043E-1,8.721278E-1,-6.112339E-2,-5.9942268E-2,2.1088224E-2,-8.758244E-3,-2.441576E-1,-8.935337E-2,9.940031E-2,-2.724465E-1,2.1064382E-2,-1.3501151E-1,2.6095003E-1,3.2715517E-1,-8.029285E-2,-1.2444225E-2,2.1744E-1,-1.868842E-1,1.2148631E-1,-1.3155353E-1,-3.363514E-1,2.1429451E-1,-2.204028E-1,1.861307E-1,-9.028749E-2,-2.1844164E-1,1.497988E-1,3.354375E-2,3.3732367E-1,4.8401304E-2,-1.1352582E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":35,"left_children":[1,3,5,7,9,11,13,15,-1,17,19,-1,21,23,25,27,29,31,33,-1,35,37,39,41,43,-1,45,47,49,51,53,-1,55,-1,57,-1,-1,59,61,63,65,-1,-1,67,69,71,73,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[5.737413E-1,2.5442712E0,2.9568355E0,1.0317914E0,2.933368E0,1.7970545E0,2.6722116E0,1.4113668E0,0E0,3.773469E0,2.6255171E0,0E0,1.9446049E0,1.6448379E0,3.1971896E0,1.4559613E0,2.7607923E0,1.453597E0,7.7396774E-1,0E0,8.144717E-1,1.0813347E0,2.6098514E0,9.910219E-1,2.5347424E0,0E0,3.0053759E0,1.739794E0,3.4718194E0,2.7556796E0,1.134274E0,0E0,2.0999446E0,0E0,2.2264047E0,0E0,0E0,6.094574E-1,1.3661572E0,1.47577E0,5.6291494E0,0E0,0E0,1.4026546E0,2.0369396E0,5.89844E-1,3.30919E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,9,9,10,10,12,12,13,13,14,14,15,15,16,16,17,17,18,18,20,20,21,21,22,22,23,23,24,24,26,26,27,27,28,28,29,29,30,30,32,32,34,34,37,37,38,38,39,39,40,40,43,43,44,44,45,45,46,46],"right_children":[2,4,6,8,10,12,14,16,-1,18,20,-1,22,24,26,28,30,32,34,-1,36,38,40,42,44,-1,46,48,50,52,54,-1,56,-1,58,-1,-1,60,62,64,66,-1,-1,68,70,72,74,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.24572E-1,1.2452005E-1,6.7480576E-1,2.6749263E0,2.521226E-1,-1.3039792E0,9.015902E-1,6.768775E-1,1.7887013E-1,-5.3669715E-1,-4.5770618E-1,1.4999834E-1,-1.6763757E0,-7.3706096E-1,9.682915E-1,-2.8569296E-1,3.2751042E-1,2.5811973E-1,2.5811973E-1,1.9337015E-1,-8.6605296E-2,3.825173E-1,5.884093E-1,7.8739315E-1,9.2987575E-2,-2.68519E-1,9.9497205E-1,-1.0917766E0,3.929833E-1,-4.5244622E-1,1.5480409E0,-2.110065E-1,9.995786E-1,4.4528523E-1,-1.180954E0,-2.571742E-1,1.8762978E-2,7.3075134E-1,-1.1564841E0,3.53506E-1,8.7150747E-1,3.4572273E-2,4.0580815E-1,1.066704E0,1.6325573E0,6.8245757E-1,9.0406567E-1,-5.9942268E-2,2.1088224E-2,-8.758244E-3,-2.441576E-1,-8.935337E-2,9.940031E-2,-2.724465E-1,2.1064382E-2,-1.3501151E-1,2.6095003E-1,3.2715517E-1,-8.029285E-2,-1.2444225E-2,2.1744E-1,-1.868842E-1,1.2148631E-1,-1.3155353E-1,-3.363514E-1,2.1429451E-1,-2.204028E-1,1.861307E-1,-9.028749E-2,-2.1844164E-1,1.497988E-1,3.354375E-2,3.3732367E-1,4.8401304E-2,-1.1352582E-1],"split_indices":[7,7,7,5,5,1,7,6,0,5,4,0,6,8,7,7,9,8,8,0,4,8,6,5,6,0,7,0,5,7,5,0,9,0,6,0,0,9,5,2,6,0,0,8,5,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.3944464E2,3.37491E2,1.01953636E2,3.2177277E2,1.5718239E1,3.399466E1,6.795898E1,3.199396E2,1.8331549E0,1.0457039E1,5.2612004E0,1.7404107E0,3.2254246E1,1.6953197E1,5.1005783E1,2.8625867E2,3.3680946E1,4.210834E0,6.2462044E0,1.6030712E0,3.658129E0,6.8588486E0,2.53954E1,2.6314144E0,1.4321782E1,3.2585664E0,4.7747215E1,2.4749425E2,3.8764423E1,2.7073631E1,6.6073146E0,1.1964844E0,3.01435E0,2.9203079E0,3.3258967E0,2.5084975E0,1.1496316E0,3.284449E0,3.5743995E0,1.66585E1,8.736897E0,1.1492751E0,1.4821393E0,9.963218E0,4.358564E0,2.7304943E0,4.5016724E1,2.573614E1,2.217581E2,3.2940285E1,5.8241396E0,1.6497065E1,1.0576567E1,5.3167443E0,1.2905703E0,1.3632665E0,1.6510834E0,1.7362175E0,1.5896791E0,1.486805E0,1.7976441E0,2.3205078E0,1.2538916E0,1.1317314E1,5.3411875E0,4.2021656E0,4.534732E0,8.595856E0,1.367362E0,3.1938596E0,1.1647044E0,1.0398563E0,1.690638E0,2.6752007E1,1.8264715E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"75","size_leaf_vector":"1"}},{"base_weights":[-4.184566E-3,-1.3518573E-1,-1.4914108E-3,2.4851373E-1,-6.8601584E-3,7.503212E-1,-3.940261E-1,-4.0576714E-1,2.008889E-3,-4.606174E-2,1.0327828E0,-2.151853E-1,1.0431841E-1,1.3859046E-1,-7.0046866E-1,-1.8521193E-2,1.00505866E-1,4.4005263E-1,7.32232E-2,6.142702E-2,-9.0175885E-1,-2.1262296E-1,5.8174278E-3,1.9057362E-1,-1.6123424E-1,-2.9191938E-1,-8.1268765E-2,5.0299738E-2,-1.4323922E-1,1.8634683E-1,-1.52178E-3,2.6621467E-2,1.8674204E-1,-2.0465358E-1,1.1846299E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":36,"left_children":[1,-1,3,5,7,9,11,13,15,-1,17,-1,-1,-1,19,21,23,-1,-1,-1,25,27,29,31,33,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[5.226517E-1,0E0,5.83764E-1,3.2438445E0,1.5062454E0,1.5572381E0,1.3680458E0,2.6915195E0,8.456089E-1,0E0,1.115231E0,0E0,0E0,0E0,1.5165482E0,1.636405E0,1.731011E0,0E0,0E0,0E0,1.3725281E-2,3.9669104E0,2.070667E0,2.325009E0,2.0143554E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,2,2,3,3,4,4,5,5,6,6,7,7,8,8,10,10,14,14,15,15,16,16,20,20,21,21,22,22,23,23,24,24],"right_children":[2,-1,4,6,8,10,12,14,16,-1,18,-1,-1,-1,20,22,24,-1,-1,-1,26,28,30,32,34,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.9810368E0,-1.3518573E-1,-1.7322416E0,7.499093E-2,-1.5456452E0,-1.312893E0,1.1350448E0,-6.147578E-1,6.004839E-1,-4.606174E-2,-9.660462E-1,-2.151853E-1,1.0431841E-1,1.3859046E-1,-1.2089357E0,-9.236573E-1,-1.4369774E-1,4.4005263E-1,7.32232E-2,6.142702E-2,-2.523423E-1,-6.004373E-1,-1.0000844E0,-2.293364E-1,1.1049571E0,-2.9191938E-1,-8.1268765E-2,5.0299738E-2,-1.4323922E-1,1.8634683E-1,-1.52178E-3,2.6621467E-2,1.8674204E-1,-2.0465358E-1,1.1846299E-2],"split_indices":[8,0,8,3,8,7,7,6,3,0,7,0,0,0,10,8,4,0,0,0,7,4,4,4,3,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.342625E2,1.6066933E0,4.3265582E2,8.143648E0,4.2451218E2,4.478393E0,3.665255E0,8.259731E0,4.1625244E2,1.1749734E0,3.3034196E0,2.5757334E0,1.0895218E0,1.9501177E0,6.3096137E0,3.451281E2,7.1124344E1,1.5761731E0,1.7272464E0,1.1446716E0,5.1649423E0,3.7592213E1,3.075359E2,5.31178E1,1.8006542E1,4.153089E0,1.0118535E0,1.5583123E1,2.2009092E1,4.3734956E0,3.031624E2,4.395851E1,9.159288E0,4.341136E0,1.3665407E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"35","size_leaf_vector":"1"}},{"base_weights":[-3.3570484E-3,-1.6709036E-1,8.871437E-3,-3.114725E-1,3.480267
2E-1,4.602717E-2,-4.8740473E-2,1.04873225E-1,-6.7614543E-1,-2.5235024E-1,8.626724E-1,1.5206117E-2,3.102398E-1,6.454111E-1,-6.31719E-2,-1.2521438E-1,7.570431E-1,-7.784875E-1,7.755008E-2,3.1399772E-1,2.723868E-2,2.6375573E-2,-2.3849337E-1,8.177682E-1,1.1005529E-1,2.8324319E-2,2.8465745E-1,-2.7969754E-1,1.5089915E-3,3.712696E-1,-2.552179E-1,2.839198E-1,4.9834475E-2,-9.1311467E-1,6.5783868E-3,9.0921875E-3,7.640823E-1,3.1802502E-1,4.5476444E-3,-2.6235873E-1,2.49218E-1,4.2989504E-2,-6.074183E-1,-4.2369607E-1,2.8509447E-2,2.3963296E-1,-6.0842667E-2,-3.124048E-1,-1.7763907E-2,1.4375407E-2,-6.178725E-2,-1.7286482E-4,3.1215924E-1,1.97806E-1,-2.4664236E-2,-7.3433034E-2,1.6356288E-1,6.974806E-2,-2.7586862E-1,1.437971E-1,-2.4479575E-1,1.563603E-1,2.1331832E-3],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":37,"left_children":[1,3,5,7,9,11,13,15,17,-1,19,21,23,25,27,29,31,33,-1,-1,-1,35,-1,37,39,-1,-1,41,43,45,-1,-1,-1,47,-1,49,51,-1,-1,-1,53,55,57,59,61,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[8.6883754E-1,2.3511858E0,8.671434E-1,3.7271976E0,5.0551357E0,1.9997058E0,1.632565E0,1.9119476E0,1.4420214E0,0E0,6.762657E-1,2.0252225E0,2.5172565E0,4.6641517E-1,2.1932561E0,3.8493629E0,2.120564E-1,1.3067269E0,0E0,0E0,0E0,2.790895E0,0E0,1.3654151E0,2.8978481E0,0E0,0E0,3.8352027E0,1.4076188E0,1.7622318E0,0E0,0E0,0E0,1.0601568E0,0E0,1.808565E0,1.0599449E0,0E0,0E0,0E0,2.48942E0,2.8913617E0,4.91623E0,2.9845133E0,1.2112092E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,21,21,23,23,24,24,27,27,28,28,29,29,33,33,35,35,36,36,40,40,41,41,42,42,43,43,44,44],"right_children":[2,4,6,8,10,12,14,16,18,-1,20,22,24,26,28,30,32,34,-1,-1,-1,36,-1,38,40,-1,-1,42,44,46,-1,-1,-1,48,-1,50,52,-1,-1,-1,54,56,58,60,62,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-8.2880706E-1,1.8145575E-1,-2.8569296E-1,-5.7662034E-1,-7.0273864E-1,-4.5244622E-1,-1.1689172E0,-8.5735327E-1,6.2878585E-1,-2.5235024E-1,-8.858995E-1,1.041898E0,-8.4839493E-1,5.760031E-2,-2.432298E-1,7.8226164E-2,2.5811973E-1,1.9354022E0,7.755008E-2,3.1399772E-1,2.723868E-2,6.4252335E-1,-2.3849337E-1,3.4824587E-2,-8.1802267E-1,2.8324319E-2,2.8465745E-1,1.3943407E-1,-2.056514E-1,-1.6163839E0,-2.552179E-1,2.839198E-1,4.9834475E-2,3.6481115E-1,6.5783868E-3,-6.7923063E-1,-1.9705367E-1,3.1802502E-1,4.5476444E-3,-2.6235873E-1,-3.6573452E-1,-8.5943006E-2,-4.5770618E-1,1.3264844E-1,-1.6563061E-1,2.3963296E-1,-6.0842667E-2,-3.124048E-1,-1.7763907E-2,1.4375407E-2,-6.178725E-2,-1.7286482E-4,3.1215924E-1,1.97806E-1,-2.4664236E-2,-7.3433034E-2,1.6356288E-1,6.974806E-2,-2.7586862E-1,1.437971E-1,-2.4479575E-1,1.563603E-1,2.1331832E-3],"split_indices":[4,6,7,3,3,7,0,4,2,0,4,3,1,6,0,2,8,10,0,0,0,3,0,10,1,0,0,5,7,7,0,0,0,5,0,7,2,0,0,0,7,5,4,10,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.3219882E2,2.9155619E1,4.0304318E2,2.3081455E1,6.074163E0,2.4514352E2,1.5789967E2,1.1109827E1,1.1971628E1,1.6430651E0,4.431098E0,2.2048138E2,2.4662142E1,2.3253992E0,1.5557428E2,8.80973E0,2.3000972E0,1.0940717E1,1.0309118E0,3.2907205E0,1.1403775E0,2.1843738E2,2.044003E0,6.103315E0,1.8558826E1,1.2
914801E0,1.0339192E0,3.5019337E1,1.2055494E2,5.520318E0,3.2894113E0,1.2863529E0,1.0137444E0,9.2449465E0,1.6957695E0,2.1442601E2,4.0113616E0,4.4415684E0,1.6617464E0,1.6439024E0,1.6914925E1,1.8082954E1,1.6936382E1,6.3208923E0,1.1423404E2,2.9397373E0,2.580581E0,7.847005E0,1.3979418E0,1.8229163E2,3.21344E1,1.3301295E0,2.6812322E0,7.1176643E0,9.797259E0,1.1821808E1,6.2611475E0,4.657966E0,1.2278416E1,1.8471166E0,4.473776E0,3.7827466E0,1.10451294E2],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"63","size_leaf_vector":"1"}},{"base_weights":[-2.4157707E-3,-1.8545436E-2,7.964626E-2,9.422109E-2,-4.4284143E-2,-2.4233137E-1,1.8453792E-1,-7.678117E-2,3.0109948E-1,6.134797E-1,-5.3411108E-2,-7.9991853E-1,8.323447E-2,2.8216407E-1,-4.4715482E-1,2.5348026E-1,-3.769372E-1,3.8853315E-1,-2.727837E-1,8.395366E-3,2.4317664E-1,-2.5325808E-1,-1.7496943E-2,-3.176801E-1,-1.7233518E-1,-3.9439985E-1,6.036601E-1,-1.8095514E-2,4.4016027E-1,2.2341779E-1,-2.7966884E-1,-1.6102503E-1,3.6937818E-1,-4.9307397E-1,1.8842676E-1,-4.6281923E-2,7.522018E-1,3.3512987E-2,-5.281555E-1,-3.0779454E-1,3.0817932E-3,1.13852404E-1,-1.8318026E-1,-7.6787806E-1,1.4918834E-1,2.7211338E-1,-1.6517343E-2,-5.212951E-1,2.60396E-1,5.505255E-1,-3.1389E-1,3.074686E-1,4.1351378E-2,1.8312449E-2,-1.94463E-1,8.173512E-2,-2.0574772E-1,8.466132E-2,3.3543304E-1,-2.4358908E-2,2.779575E-1,-1.852729E-1,1.1443245E-1,-2.0657654E-1,1.07373685E-1,7.0597485E-2,-1.0613598E-2,-4.4187937E-2,-2.807508E-1,-8.6772144E-2,1.5980586E-1,-2.2616419E-1,1.0008709E-1,-1.9739419E-1,1.4827435E-1,1.896445E-1,-1.3935022E-1,-1.8671665E-1,1.1265513E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":38,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,-1,-1,-1,37,39,-1,41,43,45,47,49,-1,-1,-1,51,53,-1,55,57,59,61,63,65,-1,-1,67,69,-1,-1,71,73,75,77,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[5.6782895E-1,1.0444211E0,2.4248533E0,2.390606E0,1.788635E0,3.288695E0,3.4393969E0,3.806452E0,3.5797126E0,4.4851124E-1,2.072666E0,8.9645386E-1,3.2488399E0,2.2446034E0,5.0757875E0,1.8854126E0,2.6456575E0,4.641762E0,0E0,0E0,0E0,3.4871557E0,1.4728498E0,0E0,9.895727E-1,1.5195591E0,1.2872975E0,2.5482912E0,2.7469225E0,0E0,0E0,0E0,2.3870013E0,1.6299248E0,0E0,3.0837064E0,2.42284E0,2.3780458E0,2.0680952E0,4.3834434E0,2.072253E0,0E0,0E0,3.066895E-1,7.687377E-1,0E0,0E0,1.5373242E0,2.8485012E0,2.5591545E0,1.209083E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,21,21,22,22,24,24,25,25,26,26,27,27,28,28,32,32,33,33,35,35,36,36,37,37,38,38,39,39,40,40,43,43,44,44,47,47,48,48,49,49,50,50],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,-1,-1,-1,38,40,-1,42,44,46,48,50,-1,-1,-1,52,54,-1,56,58,60,62,64,66,-1,-1,68,70,-1,-1,72,74,76,78,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[7.8739315E-1,-8.8221234E-1,-8.1500554E-1,-1.6608919E-1,-1.0000844E0,-6.126634E-1,1.3846228E0,-6.289329E-1,5.691137E-1,-8.4659064E-1,-1.3762592E0,7.055825E-1,-5.9413753E-2,-8.1802267E-1,-8.1802267E-1,-1.9132047E0,1.8934776E0,-1.0838329E0,-2.727837E-1,8.395366E-3,2.4317664E-1,-2.9627237E-1,-1
.0146359E0,-3.176801E-1,-5.4334486E-1,-5.9036255E-1,-2.8569296E-1,-1.6082309E-1,1.773418E0,2.2341779E-1,-2.7966884E-1,-1.6102503E-1,-1.7131008E0,-1.1724524E0,1.8842676E-1,-5.504646E-1,-7.3706096E-1,-3.3165962E-1,2.522345E0,1.3065345E0,-1.0994488E0,1.13852404E-1,-1.8318026E-1,-1.0917766E0,-1.016626E0,2.7211338E-1,-1.6517343E-2,2.7579596E-2,-6.289835E-1,8.467204E-2,6.4971733E-1,3.074686E-1,4.1351378E-2,1.8312449E-2,-1.94463E-1,8.173512E-2,-2.0574772E-1,8.466132E-2,3.3543304E-1,-2.4358908E-2,2.779575E-1,-1.852729E-1,1.1443245E-1,-2.0657654E-1,1.07373685E-1,7.0597485E-2,-1.0613598E-2,-4.4187937E-2,-2.807508E-1,-8.6772144E-2,1.5980586E-1,-2.2616419E-1,1.0008709E-1,-1.9739419E-1,1.4827435E-1,1.896445E-1,-1.3935022E-1,-1.8671665E-1,1.1265513E-1],"split_indices":[5,9,9,0,4,8,6,0,8,5,7,3,2,1,1,7,10,9,0,0,0,6,0,0,4,0,7,10,5,0,0,0,7,8,0,8,8,6,10,10,7,0,0,0,9,0,0,4,4,4,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.2687683E2,3.574019E2,6.947495E1,6.592227E1,2.914796E2,1.674963E1,5.2725323E1,3.643476E1,2.9487513E1,3.0829208E0,2.883967E2,5.637477E0,1.1112154E1,4.61468E1,6.5785236E0,1.7421455E1,1.9013306E1,2.8134192E1,1.3533207E0,1.0640953E0,2.0188255E0,4.3010406E1,2.4538628E2,3.5039952E0,2.133482E0,5.9205613E0,5.191592E0,1.6215662E1,2.9931137E1,1.748063E0,4.8304605E0,1.7638327E0,1.5657622E1,1.750042E1,1.5128853E0,1.3211178E1,1.4923015E1,2.1480349E1,2.1530058E1,1.5319673E1,2.300666E2,1.0037174E0,1.1297646E0,3.2648208E0,2.6557405E0,3.2963936E0,1.8951987E0,5.4665084E0,1.0749154E1,2.6345095E1,3.586043E0,3.1925952E0,1.2465027E1,3.9605696E0,1.353985E1,9.200084E0,4.0110946E0,7.3077226E0,7.6152916E0,1.9840826E1,1.6395215E0,1.9895237E1,1.6348201E0,9.723385E0,5.5962877E0,3.1959684E1,1.9810693E2,1.0951893E0,2.1696315E0,1.3538046E0,1.3019359E0,4.3902893E0,1.0762191E0,1.8135122E0,8.935641E0,2.4733381E1,1.6117128E0,2.5445905E0,1.0414524E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"79","size_leaf_vector":"1"}},{"base_weights":[-3.1729813E-3,-1.9229127E-2,8.990884E-2,-2.0207177E-1,-1.5358384E-2,1.7320856E-1,-4.5629495E-1,4.9097192E-1,-2.448031E-2,1.0710636E-1,8.1335837E-1,-6.877756E-1,1.4443068E-1,-1.682175E-2,6.7016137E-1,-4.737387E-2,9.075504E-2,4.198162E-1,-6.349926E-2,2.9904734E-2,1.0985091E0,-7.77487E-1,-3.643451E-2,1.2949139E-1,-6.0907036E-2,3.0277858E-2,2.520506E-1,-2.2771979E-2,-3.6619216E-1,4.103894E-1,-1.8156817E-2,5.0331587E-1,-1.1534294E-1,-6.6767645E-1,7.993861E-2,3.869871E-1,1.0935617E-1,-7.279459E-2,-2.573389E-1,-1.1066771E-2,1.5412918E-1,-1.7456096E-1,9.660308E-2,1.606063E-1,-1.3238333E-1,-2.2583024E-1,9.6749095E-3,1.8854E-1,-3.1272728E-2,-3.1198657E-1,9.208977E-2,6.819726E-2,-1.2793274E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":39,"left_children":[1,3,5,-1,7,9,11,13,15,17,19,21,23,-1,25,27,29,31,33,-1,35,37,-1,-1,-1,-1,-1,39,41,43,45,47,-1,49,51,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[6.3157225E-1,9.021064E-1,2.9079666E0,0E0,1.6708717E0,2.2264118E0,1.2816886E0,6.6527975E-1,9.3755984E-1,2.7319314E0,9.0101457E-1,2.2622132E-1,4.0611628E-1,0E0,3.8946915E-1,2.3116314E0,2.0604763E0,1.404428E0,2.971901E0,0E0,1.4321804E-2,1.7917871E-2,0E0,0E0,0E0,0E0,0E0,2.1069043E0,3.3226275E0,1.8358979E0
,1.6885276E0,1.3404922E0,0E0,2.7024443E0,2.1948016E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,14,14,15,15,16,16,17,17,18,18,20,20,21,21,27,27,28,28,29,29,30,30,31,31,33,33,34,34],"right_children":[2,4,6,-1,8,10,12,14,16,18,20,22,24,-1,26,28,30,32,34,-1,36,38,-1,-1,-1,-1,-1,40,42,44,46,48,-1,50,52,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.066704E0,-1.8886994E0,5.414032E-1,-2.0207177E-1,-1.8179249E0,3.902867E-1,1.1049571E0,2.603036E-1,6.7480576E-1,1.3154992E0,1.3154992E0,1.7939934E0,-5.461783E-1,-1.682175E-2,4.6804446E-1,2.24572E-1,9.015902E-1,5.414116E-1,1.4398967E0,2.9904734E-2,-2.1057709E-1,5.612582E-2,-3.643451E-2,1.2949139E-1,-6.0907036E-2,3.0277858E-2,2.520506E-1,1.678759E-1,4.5761305E-1,1.483437E0,9.682915E-1,1.4247327E-1,-1.1534294E-1,9.323718E-1,-5.5560064E-1,3.869871E-1,1.0935617E-1,-7.279459E-2,-2.573389E-1,-1.1066771E-2,1.5412918E-1,-1.7456096E-1,9.660308E-2,1.606063E-1,-1.3238333E-1,-2.2583024E-1,9.6749095E-3,1.8854E-1,-3.1272728E-2,-3.1198657E-1,9.208977E-2,6.819726E-2,-1.2793274E-1],"split_indices":[8,6,7,0,6,3,3,9,7,8,8,1,9,0,10,7,7,4,8,0,1,4,0,0,0,0,0,7,1,4,7,0,0,9,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.2043094E2,3.5925473E2,6.117622E1,1.0952072E0,3.5815952E2,5.3673534E1,7.5026855E0,5.403516E0,3.52756E2,4.9707977E1,3.9655569E0,5.31119E0,2.191496E0,1.5027801E0,3.9007356E0,2.9478134E2,5.797468E1,1.7030802E1,3.2677177E1,1.5173949E0,2.4481618E0,4.2630296E0,1.0481606E0,1.0671209E0,1.1243749E0,1.2635734E0,2.6371622E0,2.746585E2,2.0122833E1,1.4030348E1,4.3944336E1,1.5768018E1,1.2627834E0,5.568248E0,2.7108929E1,1.3409396E0,1.1072222E0,1.0815604E0,3.1814692E0,2.6852435E2,6.1341295E0,1.5438901E1,4.683933E0,1.2558976E1,1.4713714E0,1.9269894E0,4.2017345E1,1.3046219E1,2.7217994E0,3.9807363E0,1.5875115E0,2.1424318E1,5.6846094E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"53","size_leaf_vector":"1"}},{"base_weights":[-3.3297888E-3,-1.3571425E-1,6.575565E-3,-6.432418E-1,7.3698424E-2,1.8358406E-2,-3.1758603E-1,-8.171211E-1,3.3230364E-2,-1.0025565E-1,5.81663E-1,1.2873833E-2,5.251452E-1,-8.012727E-1,3.0561993E-2,-5.5490427E-2,-2.696121E-1,-3.969634E-1,1.0439117E-1,8.1902003E-1,-6.439141E-2,-1.7589386E-1,2.6095076E-2,3.2556722E-1,-1.1939654E-1,-2.7396372E-1,-6.0009502E-2,2.671983E-1,-4.9566305E-1,5.7675723E-2,-5.6557107E-1,-4.5957E-2,2.0270681E-1,3.3261603E-1,3.4660804E-1,-2.4643765E-1,1.9357799E-1,2.7794188E-1,1.2417269E-2,-3.2321772E-1,2.695342E-1,-3.720673E-2,-1.8362765E-1,-5.228239E-3,-2.4666601E-1,-8.931616E-2,9.863762E-2,2.0767E-1,-4.5297805E-2,1.6671082E-1,-1.00656606E-1,-2.9234104E-2,1.8952724E-1,6.3794884E-3,-9.9409275E-2,-2.0686704E-1,4.3091115E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":40,"left_children":[1,3,5,7,9,11,13,15,-1,17,19,21,23,25,27,-1,-1,29,31,33,-1,35,37,-1,-1,-1,-1,39,41,-1,43,45,-1,-1,47,49,-1,51,53,55,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[5.4943544E-1,3.147331E0,1.4960198E0,1.220937E0,1.9620498E0,1.0422536E0,2.3379993E0,2.0128107E-1,0E0,1.0677131E0,1.2423521E0,9.389821E-1,2.5080643E0,2.0679379E-1,1.2471609E0,0E0,0E0,8.148134E-1,9.582219E-1,2.3906994E-1,0E0,1.58607
13E0,1.2048125E0,0E0,0E0,0E0,0E0,2.833983E0,5.9395134E-2,0E0,7.987572E-1,9.92478E-1,0E0,0E0,6.524973E-1,1.8592831E0,0E0,2.4551725E0,1.0221665E0,8.279414E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,9,9,10,10,11,11,12,12,13,13,14,14,17,17,18,18,19,19,21,21,22,22,27,27,28,28,30,30,31,31,34,34,35,35,37,37,38,38,39,39],"right_children":[2,4,6,8,10,12,14,16,-1,18,20,22,24,26,28,-1,-1,30,32,34,-1,36,38,-1,-1,-1,-1,40,42,-1,44,46,-1,-1,48,50,-1,52,54,56,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-6.787934E-1,6.313125E-1,8.8282496E-1,1.683855E0,6.533265E-2,8.220804E-1,7.8226164E-2,-7.146222E-1,3.3230364E-2,6.4971733E-1,3.4227094E-1,-8.2880706E-1,1.4704613E-1,-4.2400864E-1,-6.855323E-1,-5.5490427E-2,-2.696121E-1,-1.4983252E-1,8.3089806E-2,-1.6232948E0,-6.439141E-2,1.0681607E0,-4.7233352E-1,3.2556722E-1,-1.1939654E-1,-2.7396372E-1,-6.0009502E-2,-1.3048095E0,-5.4334486E-1,5.7675723E-2,-1.5879076E0,-5.262896E-1,2.0270681E-1,3.3261603E-1,1.1561755E0,-9.3110704E-1,1.9357799E-1,-2.7132162E-1,2.2148988E0,1.0410126E0,2.695342E-1,-3.720673E-2,-1.8362765E-1,-5.228239E-3,-2.4666601E-1,-8.931616E-2,9.863762E-2,2.0767E-1,-4.5297805E-2,1.6671082E-1,-1.00656606E-1,-2.9234104E-2,1.8952724E-1,6.3794884E-3,-9.9409275E-2,-2.0686704E-1,4.3091115E-2],"split_indices":[2,8,1,10,0,1,2,4,0,10,10,4,2,5,6,0,0,1,5,6,0,0,2,0,0,0,0,6,4,0,6,6,0,0,1,5,0,1,10,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.173153E2,2.8166788E1,3.8914853E2,7.6221876E0,2.05446E1,3.7640988E2,1.2738631E1,6.125832E0,1.4963554E0,1.5901659E1,4.642941E0,3.7339432E2,3.015564E0,4.7867737E0,7.9518576E0,1.0736914E0,5.0521407E0,6.1072693E0,9.79439E0,3.5548117E0,1.0881294E0,2.3635275E1,3.4975903E2,1.7675674E0,1.2479967E0,3.5980132E0,1.1887604E0,5.824772E0,2.1270854E0,1.3276018E0,4.779668E0,8.481797E0,1.3125921E0,1.3686013E0,2.1862102E0,2.2409964E1,1.2253098E0,1.702167E1,3.3273737E2,3.2618704E0,2.5629015E0,1.0000747E0,1.1270107E0,1.864795E0,2.9148726E0,5.1963425E0,3.2854548E0,1.0592974E0,1.1269128E0,1.7164674E0,2.0693497E1,8.610616E0,8.411053E0,3.2530338E2,7.433988E0,1.5604023E0,1.701468E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"57","size_leaf_vector":"1"}},{"base_weights":[-2.71265E-3,5.193144E-2,-2.563422E-2,1.7168851E-1,-5.662573E-2,-2.9769322E-1,-1.405649E-2,-1.2540461E-1,2.8190398E-1,2.39543E-1,-1.6583987E-1,-8.009886E-1,7.35387E-3,-7.448052E-2,3.6193527E-2,2.226987E-1,-5.118631E-1,8.136488E-2,5.7612157E-1,3.73753E-1,-2.2404924E-1,-2.3937875E-1,4.4975024E-1,-2.827791E-1,-5.9020407E-2,3.5275424E-1,-4.366121E-1,-1.7206341E-1,2.0222285E-1,1.609107E-1,-5.024402E-2,-3.9410478E-1,6.728495E-1,-8.163666E-1,1.430541E-1,-2.177945E-1,4.5845932E-1,8.363121E-2,9.8864543E-1,5.147324E-1,-2.642208E-1,-9.155499E-2,-4.9861166E-1,-9.1008045E-2,6.873077E-1,-1.8786648E-1,3.0570078E-1,-2.4649437E-1,8.040132E-2,-6.1024476E-2,-6.008027E-1,5.063524E-1,-1.0049229E-1,-9.0297766E-2,2.6111695E-1,2.2789834E-1,-7.981152E-2,1.1789454E-1,-2.5666827E-1,7.0293975E-4,2.6266605E-1,-1.1461095E-1,-3.1682816E-1,7.911369E-2,-2.0853876E-1,2.1344548E-1,-5.2002944E-2,2.0820117E-1,-1.14085086E-1,1.12081235E-2,3.3472794E-1,-1.4008293E-2,1.9535054E-1,-2.1766093E-1,8.280453E-2,2.1347227E-2,-1.5092039E-1,-1.8260558E-1,1.5386084E-1,2.688968E-1,4.458131E-3,-1.8398948E-1,1.3073981E-1,-8.688347E-2,1.8078197E-2,-2.707542E-1,-2.1221426E-3,8.83421
6E-2,3.5664696E-1,1.6672966E-1,-1.060618E-1,-9.834044E-2,2.1288498E-1,1.02044344E-1,-9.3569286E-2,-8.5723534E-2,1.3817155E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":41,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,-1,41,43,-1,-1,45,47,49,51,53,55,57,59,61,-1,63,65,67,69,71,73,75,77,-1,79,81,-1,-1,-1,83,85,87,89,91,93,-1,95,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[5.217759E-1,1.6103444E0,9.1995895E-1,1.960331E0,2.1529384E0,1.836951E0,8.603577E-1,2.347134E0,2.5013301E0,2.6633358E0,2.2951765E0,2.00454E-1,1.4446774E0,3.5065956E0,1.6778439E0,2.8835E0,2.848911E0,3.1285348E0,3.440393E0,1.6217928E0,0E0,1.6334834E0,1.1931903E0,0E0,0E0,2.0957215E0,1.3406115E0,4.5143423E0,3.1794453E0,1.625653E0,2.2696617E0,2.0103216E0,7.8909564E-1,2.8527164E-1,0E0,3.779993E0,2.0734446E0,2.8114624E0,1.0882368E0,1.0927525E0,1.1100605E0,1.986303E0,2.0587966E0,0E0,6.178014E-1,1.2656925E0,0E0,0E0,0E0,2.1534262E0,3.4960842E0,2.125071E0,3.1632383E0,3.8215811E0,2.178026E0,0E0,2.3435006E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,21,21,22,22,25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,33,33,35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,44,44,45,45,49,49,50,50,51,51,52,52,53,53,54,54,56,56],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,-1,42,44,-1,-1,46,48,50,52,54,56,58,60,62,-1,64,66,68,70,72,74,76,78,-1,80,82,-1,-1,-1,84,86,88,90,92,94,-1,96,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.0994488E0,1.3903836E0,-1.0427527E0,-7.3706096E-1,-5.504646E-1,-3.4935325E-1,-1.4228727E-1,1.3943407E-1,-6.625513E-2,8.352457E-1,7.310489E-1,-2.7132162E-1,-2.432298E-1,-9.087947E-3,-9.316682E-2,8.034405E-1,-1.1661501E0,-6.35789E-1,-7.060735E-1,2.9675448E-1,-2.2404924E-1,8.3089806E-2,-1.7664618E0,-2.827791E-1,-5.9020407E-2,-5.9036255E-1,-2.7132162E-1,-1.3702966E-1,5.1640505E-1,-8.0783725E-1,-9.793865E-1,-8.4659064E-1,-3.2037044E-1,-3.445578E-1,1.430541E-1,-3.113201E-1,-4.7465166E-1,-6.004373E-1,-8.0026084E-1,-4.0351355E-1,-1.3329034E0,2.1463482E-1,1.0045053E0,-9.1008045E-2,-2.834341E-2,5.6201607E-1,3.0570078E-1,-2.4649437E-1,8.040132E-2,5.963377E-1,3.4069845E-1,1.8145575E-1,-6.9653356E-1,6.0531694E-1,2.844956E-1,2.2789834E-1,6.004839E-1,1.1789454E-1,-2.5666827E-1,7.0293975E-4,2.6266605E-1,-1.1461095E-1,-3.1682816E-1,7.911369E-2,-2.0853876E-1,2.1344548E-1,-5.2002944E-2,2.0820117E-1,-1.14085086E-1,1.12081235E-2,3.3472794E-1,-1.4008293E-2,1.9535054E-1,-2.1766093E-1,8.280453E-2,2.1347227E-2,-1.5092039E-1,-1.8260558E-1,1.5386084E-1,2.688968E-1,4.458131E-3,-1.8398948E-1,1.3073981E-1,-8.688347E-2,1.8078197E-2,-2.707542E-1,-2.1221426E-3,8.834216E-2,3.5664696E-1,1.6672966E-1,-1.060618E-1,-9.834044E-2,2.1288498E-1,1.02044344E-1,-9.3569286E-2,-8.5723534E-2,1.3817155E-2],"split_indices":[7,10,7,8,8,6,5,5,6,2,5,1,0,3,3,10,7,1,0,0,0,5,7,0,0,0,1,6,3,3,7,5,0,9,0,5,0,4,4,2,7,1,8,0,1,5,0,0,0,9,6,6,1,0,4,0,3,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.1451532E2,1.2212017E2,2.9239514E2,5.7788284E1,6.433189E1,1.0926498E1,2.8146866E2,1.5599901E1,4.2188385E1,1.7010147E1,4.7321735E1,3.5098429E0,7.416655E0,1.27578E2,1.5389064E2,8.4301605E0,7.1697407E0,2.5847345E1,1.634104E1,1.5519574E1,1.4905726E0,4.2817825E1,4.5039124E0,2.3898425E0,1.1200004E0,4.286925E0,3.1297302E0,9.459579E1,3.2982216E1,6.2643436E1,9.124721E1,3.6092355E0,4.820925E0,5.614802E0,1.5549389E0,1.4648627E1,1.1198717E1,7.9968076E0,8.344233E0,1.2868967E1,2.6506069E0,2.8130178E1,1.4687645E1,1.0135944E0,3.4903178E0,2.7629664E0,1.5239587E0,1.916417E0,1.2133131E0,7.604363E1,1.8552158E1,1.6117102E1,1.6865112E1,1.7891035E1,4.47524E1,2.34416E0,8.8903046E1,1.3888017E0,2.2204337E0,1.3539325E0,3.4669926E0,2.919313E0,2.695489E0,7.515236E0,7.133391E0,7.906276E0,3.2924414E0,3.239122E0,4.757685E0,1.136078E0,7.2081547E0,2.6445494E0,1.0224418E1,1.2447213E0,1.4058856E0,2.0751938E1,7.3782415E0,1.3605286E1,1.08236E0,2.4087017E0,1.0816162E0,1.6524062E0,1.1105602E0,2.5879835E1,5.0163795E1,1.195632E1,6.595839E0,1.3391182E1,2.72592E0,4.360417E0,1.2504696E1,1.42501955E1,3.6408405E0,3.968534E1,5.067059E0,3.3243397E1,5.5659653E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"97","size_leaf_vector":"1"}},{"base_weights":[-2.6227373E-3,-1.3754807E-2,1.2307353E-1,-3.9276108E-1,-8.309991E-3,4.884922E-1,-2.4557693E-3,-2.8399613E-1,2.0872854E-1,-1.6591711E-1,-5.1880893E-3,7.630391E-1,-2.1951582E-1,-1.3632463E-1,2.6978168E-1,1.1047319E-3,-2.7278337E-1,5.1054034E-2,2.90527E-1,-1.2713185E-1,2.8365834E-2,2.0049402E-1,-3.4416428E-1,-1.5446836E-1,6.160585E-1,1.85567E-1,-3.2858744E-3,-4.8469007E-1,1.6444191E-1,-5.8774024E-1,6.970382E-1,-7.4238443E-1,2.1960448E-2,4.811278E-1,-2.510908E-1,2.4598578E-1,-1.3640489E-2,-2.6203893E-2,5.58091E-3,-2.495118E-1,3.684531E-2,2.1636859E-4,-2.515811E-1,2.559671E-1,1.489184E-2,-2.9875964E-1,1.6422891E-3,1.7214063E-1,-1.09398805E-1,2.103275E-1,4.5929253E-3],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":42,"left_children":[1,3,5,7,9,11,13,-1,-1,-1,15,17,19,21,23,25,27,-1,-1,-1,-1,29,31,33,35,-1,37,39,-1,41,43,45,47,49,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[5.736075E-1,7.7342683E-1,1.531158E0,3.8594491E0,6.2988037E-1,1.82618E0,9.7778517E-1,0E0,0E0,0E0,6.249609E-1,6.110971E-1,2.4284442E-1,1.3272165E0,1.3948817E0,9.872777E-1,1.7490305E0,0E0,0E0,0E0,0E0,3.3413951E0,1.6904917E0,2.4742649E0,7.2135985E-1,0E0,6.6743517E-1,1.6428332E0,0E0,5.080559E-1,4.685769E-1,1.0535917E0,1.6928732E0,3.0768985E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,10,10,11,11,12,12,13,13,14,14,15,15,16,16,21,21,22,22,23,23,24,24,26,26,27,27,29,29,30,30,31,31,32,32,33,33],"right_children":[2,4,6,8,10,12,14,-1,-1,-1,16,18,20,22,24,26,28,-1,-1,-1,-1,30,32,34,36,-1,38,40,-1,42,44,46,48,50,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.9354022E0,-1.7998126E0,2.019251E0,-6.805919E-1,-1.9810368E0,3.9471725E-1,3.2751042E-1,-2.8399613E-1,2.0872854E-1,-1.6591711E-1,1.8515531E0,-1.9863154E-1,-1.9705367E-1,-2.9627237E-1,-5.439832E-1,-1.
7664618E0,3.6481115E-1,5.1054034E-2,2.90527E-1,-1.2713185E-1,2.8365834E-2,-8.4659064E-1,-1.31742265E-2,-1.039405E0,1.0346863E0,1.85567E-1,-6.289329E-1,-4.2915997E-1,1.6444191E-1,2.1450245E0,-3.670469E-1,-8.908796E-2,-3.9751104E-1,6.0531694E-1,-2.510908E-1,2.4598578E-1,-1.3640489E-2,-2.6203893E-2,5.58091E-3,-2.495118E-1,3.684531E-2,2.1636859E-4,-2.515811E-1,2.559671E-1,1.489184E-2,-2.9875964E-1,1.6422891E-3,1.7214063E-1,-1.09398805E-1,2.103275E-1,4.5929253E-3],"split_indices":[10,7,10,9,8,9,9,0,0,0,10,5,2,6,6,7,5,0,0,0,0,5,6,6,1,0,0,4,0,10,6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.0772668E2,3.75373E2,3.235368E1,4.3087945E0,3.710642E2,7.533124E0,2.4820555E1,2.942063E0,1.3667313E0,1.1106502E0,3.6995355E2,5.3721995E0,2.160925E0,1.6973383E1,7.847173E0,3.6242654E2,7.5270195E0,1.8527296E0,3.5194697E0,1.0977162E0,1.0632086E0,6.490469E0,1.0482913E1,3.775472E0,4.071701E0,1.571308E0,3.6085522E2,6.3082385E0,1.2187806E0,2.4374695E0,4.0529995E0,4.5291E0,5.9538136E0,2.1076872E0,1.6677848E0,2.929342E0,1.142359E0,7.39341E1,2.869211E2,3.780007E0,2.5282319E0,1.026556E0,1.4109135E0,3.0091202E0,1.043879E0,3.1349201E0,1.3941797E0,2.2533643E0,3.700449E0,1.088575E0,1.0191121E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"51","size_leaf_vector":"1"}},{"base_weights":[-1.560931E-3,2.0151433E-2,-6.1986603E-2,4.9122483E-2,-1.2915613E-1,-2.2270824E-1,4.7210976E-2,1.813532E-2,3.3711454E-1,2.137451E-1,-1.8068138E-1,-3.4103194E-1,2.7076337E-1,1.049424E-1,-5.243181E-1,5.900165E-1,5.1120175E-3,7.10476E-1,1.27539495E-2,-3.6834705E-1,4.6400484E-2,-4.473452E-2,-6.2228996E-1,5.6743956E-1,-3.6202022E-1,-5.20375E-1,1.7223082E-1,-7.010465E-1,1.2938871E-2,2.916598E-1,1.5100668E-2,2.60946E-2,-2.8649136E-1,1.0312525E0,-8.563201E-3,-3.31814E-1,5.005278E-1,-7.228615E-1,-5.2232713E-2,3.8218018E-1,-1.8351074E-1,2.4071276E-1,-5.774363E-1,-8.5395354E-1,9.074813E-3,-1.2444196E-1,8.420199E-1,5.6497883E-2,-1.9814625E-1,1.0999061E-1,-2.3214191E-1,-1.10878825E-1,3.565058E-1,-2.6426038E-1,-3.8321912E-2,-1.4800599E-2,2.1497386E-2,1.899853E-3,2.1639876E-1,3.3281208E-3,-2.539744E-1,1.4876294E-2,3.8511696E-1,1.659044E-1,-1.4778887E-1,-1.6040227E-1,1.2090256E-1,2.1452215E-1,-6.5315865E-2,-2.667933E-1,1.08353E-1,-1.9230027E-1,6.998853E-2,1.7059568E-1,-1.3273826E-1,-1.0883612E-1,1.6230918E-1,1.4239007E-1,-1.9605863E-1,6.0370978E-2,-2.3758407E-1,-2.6353288E-2,-2.76375E-1,9.434711E-2,-1.0920282E-1,3.28775E-1,1.453025E-4,-9.03624E-2,1.0101061E-1,2.0775038E-1,2.249109E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":43,"left_children":[1,3,5,7,9,11,13,15,17,-1,19,21,23,25,27,29,31,33,35,37,39,41,43,45,47,49,51,53,-1,-1,55,57,59,61,63,65,67,69,71,73,75,77,79,81,83,-1,85,-1,-1,-1,-1,87,89,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[5.3354275E-1,1.3006672E0,1.8970506E0,2.23671E0,2.2493482E0,2.641718E0,2.1941254E0,1.6948209E0,2.9407706E0,0E0,2.0147967E0,2.943915E0,1.8953438E0,2.6022763E0,6.434393E-1,1.0986702E0,1.3774188E0,2.616418E0,2.5279114E0,2.8511248E0,1.7761908E0,2.9434245E0,2.672214E0,2.1072085E0,6.9663733E-1,1.6437759E0,2.8914473E0,3.7412715E-1,0E0,0E0,1.4610838E-2,2.8776872E
0,2.528315E0,1.8759384E0,1.4901134E0,1.5327034E0,1.1135603E0,2.4996514E0,2.621592E0,1.6826333E0,1.9879191E0,2.9697654E0,1.2833221E0,6.0669994E-1,7.844139E-1,0E0,1.1583836E0,0E0,0E0,0E0,0E0,2.0024474E0,3.1028204E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,43,43,44,44,46,46,51,51,52,52],"right_children":[2,4,6,8,10,12,14,16,18,-1,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,-1,-1,56,58,60,62,64,66,68,70,72,74,76,78,80,82,84,-1,86,-1,-1,-1,-1,88,90,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.9675448E-1,6.768775E-1,-3.6243847E-1,4.6455386E-1,-1.1689172E0,8.361202E-1,1.0045053E0,-1.7322416E0,-5.2874237E-2,2.137451E-1,-2.293364E-1,-3.720675E-1,-6.35789E-1,-1.3590488E0,-2.4030486E-1,3.4227094E-1,2.844956E-1,-5.1429987E-1,7.0561665E-1,8.7190956E-1,-8.6145854E-1,8.034405E-1,1.2684059E0,-3.435213E-1,4.5103574E-1,-1.039405E0,4.6804446E-1,1.4398967E0,1.2938871E-2,2.916598E-1,-3.6642316E-1,1.4176449E-1,-6.186598E-1,-6.359675E-1,-4.7465166E-1,3.9471725E-1,2.1586609E-1,1.066704E0,-4.26067E-1,1.4247327E-1,1.5615591E0,4.4686025E-1,-1.2995528E0,-1.6763757E0,1.376723E0,-1.2444196E-1,-8.6605296E-2,5.6497883E-2,-1.9814625E-1,1.0999061E-1,-2.3214191E-1,9.981511E-1,5.599659E-1,-2.6426038E-1,-3.8321912E-2,-1.4800599E-2,2.1497386E-2,1.899853E-3,2.1639876E-1,3.3281208E-3,-2.539744E-1,1.4876294E-2,3.8511696E-1,1.659044E-1,-1.4778887E-1,-1.6040227E-1,1.2090256E-1,2.1452215E-1,-6.5315865E-2,-2.667933E-1,1.08353E-1,-1.9230027E-1,6.998853E-2,1.7059568E-1,-1.3273826E-1,-1.0883612E-1,1.6230918E-1,1.4239007E-1,-1.9605863E-1,6.0370978E-2,-2.3758407E-1,-2.6353288E-2,-2.76375E-1,9.434711E-2,-1.0920282E-1,3.28775E-1,1.453025E-4,-9.03624E-2,1.0101061E-1,2.0775038E-1,2.249109E-2],"split_indices":[0,6,1,6,0,6,8,8,8,0,4,4,1,8,3,10,4,1,10,5,8,10,9,4,0,6,10,8,0,0,3,4,3,10,0,9,2,8,8,0,6,6,7,6,0,0,4,0,0,0,0,4,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[4.0471652E2,2.9822412E2,1.0649242E2,2.5032335E2,4.7900757E1,4.2661667E1,6.383075E1,2.2696545E2,2.3357893E1,2.0232992E0,4.5877457E1,3.4659893E1,8.001773E0,5.871609E1,5.1146593E0,4.0670686E0,2.2289839E2,1.0305343E1,1.3052549E1,2.4778141E1,2.1099316E1,1.7443113E1,1.7216782E1,5.517974E0,2.483799E0,5.0502415E0,5.3665848E1,3.7204938E0,1.3941658E0,2.0279403E0,2.0391285E0,2.0878555E2,1.4112838E1,6.8259516E0,3.479391E0,7.836507E0,5.216043E0,1.1073158E1,1.3704983E1,8.306128E0,1.2793189E1,1.1714206E1,5.728907E0,1.2337346E1,4.8794346E0,1.0939467E0,4.424027E0,1.0031337E0,1.4806654E0,1.0230118E0,4.02723E0,2.1315783E1,3.2350067E1,2.4237888E0,1.296705E0,1.0128622E0,1.0262662E0,2.0399615E2,4.789398E0,9.856244E0,4.2565937E0,1.6411575E0,5.1847944E0,1.5447968E0,1.9345942E0,6.3545904E0,1.4819162E0,4.019735E0,1.1963078E0,9.754956E0,1.3182015E0,4.0691943E0,9.635789E0,7.0274653E0,1.278662E0,1.0655853E1,2.1373355E0,9.657243E0,2.0569632E0,1.2507105E0,4.4781966E0,1.1823773E0,1.1154969E1,2.769395E0,2.1100397E0,3.166417E0,1.2
576103E0,1.5185385E1,6.1303983E0,1.4083061E1,1.8267004E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"91","size_leaf_vector":"1"}},{"base_weights":[-3.1038409E-3,3.8205287E-1,-6.623554E-3,8.2883774E-4,1.577424E-1,3.1282618E-3,-1.2820354E-1,7.818573E-2,-1.9482259E-2,-5.3526904E-3,-6.701496E-1,-4.886736E-1,1.2118716E-1,-3.9125735E-1,-5.538664E-3,-5.990982E-1,1.0020563E-1,-2.552439E-1,-3.746405E-2,-7.3204154E-1,1.1501645E-1,4.097818E-1,-1.2591903E-2,-7.9868454E-1,4.039119E-1,-3.0592922E-2,1.6500665E-1,-2.277421E-1,-2.1390647E-2,-1.7937909E-1,3.4531674E-1,-2.5589865E-1,-6.1342325E-2,-1.371978E-1,1.4969862E-1,1.3894466E-2,-1.9572036E-1,-3.0616447E-1,1.49466805E-2,2.1222016E-1,-2.0314725E-2,2.6179818E-3,-1.0820499E-1,7.8500584E-2,-1.3537188E-1,2.694114E-2,-2.1777602E-1,-7.689847E-2,1.6595548E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":44,"left_children":[1,3,5,-1,-1,7,9,11,13,15,17,19,21,23,25,27,29,-1,-1,31,-1,33,35,37,39,41,43,-1,-1,45,47,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[5.4281986E-1,1.9626302E-1,4.6981725E-1,0E0,0E0,6.2520385E-1,1.9553123E0,2.1379187E0,1.4660282E0,1.6264848E0,4.2856646E-1,1.5989623E0,3.0963268E0,3.6349225E0,1.175815E0,2.7665293E-1,1.5748887E0,0E0,0E0,1.5740776E-1,0E0,2.1922493E0,2.1286135E0,1.3867102E0,6.387487E-1,3.1148136E0,2.2179997E0,0E0,0E0,1.6793175E0,1.6557847E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,19,19,21,21,22,22,23,23,24,24,25,25,26,26,29,29,30,30],"right_children":[2,4,6,-1,-1,8,10,12,14,16,18,20,22,24,26,28,30,-1,-1,32,-1,34,36,38,40,42,44,-1,-1,46,48,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.3647237E0,3.825173E-1,1.0484438E0,8.2883774E-4,1.577424E-1,6.277426E-2,5.963377E-1,-8.326436E-1,1.3264844E-1,1.1192183E0,1.6043851E0,2.5811973E-1,-2.523423E-1,6.0531694E-1,4.6455386E-1,1.1254588E0,7.8226164E-2,-2.552439E-1,-3.746405E-2,1.9309677E-1,1.1501645E-1,-1.016626E0,1.4398967E0,2.851881E-1,6.2878585E-1,2.699239E-1,7.664258E-1,-2.277421E-1,-2.1390647E-2,-5.9413753E-2,-1.6608919E-1,-2.5589865E-1,-6.1342325E-2,-1.371978E-1,1.4969862E-1,1.3894466E-2,-1.9572036E-1,-3.0616447E-1,1.49466805E-2,2.1222016E-1,-2.0314725E-2,2.6179818E-3,-1.0820499E-1,7.8500584E-2,-1.3537188E-1,2.694114E-2,-2.1777602E-1,-7.689847E-2,1.6595548E-1],"split_indices":[1,8,6,0,0,10,9,7,10,6,5,8,7,0,6,5,2,0,0,9,0,9,8,3,2,6,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.9754892E2,2.6261563E0,3.9492276E2,1.0019045E0,1.6242518E0,3.665004E2,2.842238E1,8.427725E1,2.8222314E2,2.3993336E1,4.429044E0,5.2116323E0,7.906562E1,9.224057E0,2.7299908E2,2.9159806E0,2.1077354E1,2.9058876E0,1.5231564E0,4.2000723E0,1.0115598E0,2.4389116E1,5.4676502E1,6.0961194E0,3.127938E0,2.3874626E2,3.425282E1,1.9011722E0,1.0148083E0,9.97154E0,1.1105815E1,2.914927E0,1.2851453E0,1.890293E0,2.2498823E1,5.0916286E1,3.760215E0,4.6718006E0,1.4243188E0,1.5990496E0,1.5288885E0,2.1420435E2,2.4541916E1,3.010591E1,4.1469107E0,7.2409215E0,2.7306192E0,2.791907E0,8.313908E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"49","size_leaf_vector":"1"}},{"base_weights":[-6.3173133E-3,1.4677487E-2,-6.604793E-2,1.9849893E-2,-1.8
567152E-1,-9.9741355E-2,4.2643565E-1,1.1305889E-1,-8.2148155E-3,4.8752046E-1,-1.1968682E-1,5.816406E-1,-5.1550504E-2,-3.2364315E-1,2.3871659E-1,-2.6191872E-1,1.2996988E-2,2.4015607E-1,-3.5334107E-2,-3.9757395E-1,-4.5083556E-2,7.334455E-1,-1.5893145E-2,2.0234582E-1,-8.405367E-1,-2.5887093E-1,3.3802474E-1,-5.751152E-1,2.0203543E-1,3.4524363E-2,-2.8568146E-1,-5.168559E-1,2.1511444E-1,2.5147444E-1,-1.316693E-1,3.68136E-2,3.0594122E-1,1.3613689E-1,-1.0499244E-1,-8.601545E-2,-3.6533016E-1,1.959388E-1,-1.7849022E-1,3.0637646E-1,6.4608596E-2,1.0730715E-1,-2.2001359E-1,1.8048802E-1,-5.457773E-2,2.7252128E-3,1.3473852E-1,-1.932635E-1,2.1857776E-2,1.5366966E-2,-2.0862491E-1,1.6141237E-1,-1.02424495E-1,1.1324961E-1,-1.9037937E-1,-1.9187798E-1,-1.5013881E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":45,"left_children":[1,3,5,7,-1,9,11,13,15,17,19,21,-1,23,25,27,29,-1,-1,31,33,35,-1,37,39,41,43,45,47,49,51,53,55,57,59,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[4.9853158E-1,9.770688E-1,1.757839E0,7.6733655E-1,0E0,1.2011309E0,7.3218846E-1,3.801549E0,1.2202392E0,7.387987E-1,1.9548978E0,5.8166885E-1,0E0,4.40857E0,2.7202635E0,2.6581748E0,1.3576705E0,0E0,0E0,1.5737648E0,1.9884396E0,6.81828E-1,0E0,1.343825E0,1.3340435E0,3.2792518E0,3.5386739E0,1.8424797E0,1.3046639E0,2.0676868E0,1.872383E0,1.7646365E0,8.967071E-1,2.1995726E0,2.438761E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,5,5,6,6,7,7,8,8,9,9,10,10,11,11,13,13,14,14,15,15,16,16,19,19,20,20,21,21,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,33,33,34,34],"right_children":[2,4,6,8,-1,10,12,14,16,18,20,22,-1,24,26,28,30,-1,-1,32,34,36,-1,38,40,42,44,46,48,50,52,54,56,58,60,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.9675448E-1,2.418105E0,2.349285E0,-8.4839493E-1,-1.8567152E-1,-1.435813E0,-2.0780419E-1,-6.289835E-1,-8.708964E-1,1.4902705E0,9.406206E-3,4.385207E0,-5.1550504E-2,-1.1411514E-1,-8.6145854E-1,5.691137E-1,2.844956E-1,2.4015607E-1,-3.5334107E-2,1.1049571E0,-5.4334486E-1,9.995786E-1,-1.5893145E-2,1.3065345E0,-7.431684E-1,-1.0831622E0,-6.126634E-1,-1.0146359E0,4.6804446E-1,1.4028195E0,1.0045053E0,-9.52706E-1,6.1836034E-1,8.9475995E-1,4.5103574E-1,3.68136E-2,3.0594122E-1,1.3613689E-1,-1.0499244E-1,-8.601545E-2,-3.6533016E-1,1.959388E-1,-1.7849022E-1,3.0637646E-1,6.4608596E-2,1.0730715E-1,-2.2001359E-1,1.8048802E-1,-5.457773E-2,2.7252128E-3,1.3473852E-1,-1.932635E-1,2.1857776E-2,1.5366966E-2,-2.0862491E-1,1.6141237E-1,-1.02424495E-1,1.1324961E-1,-1.9037937E-1,-1.9187798E-1,-1.5013881E-2],"split_indices":[0,2,2,1,0,2,6,4,3,1,2,0,0,5,8,8,4,0,0,3,4,9,0,10,4,10,8,0,10,9,8,7,5,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.957698E2,2.933974E2,1.02372406E2,2.9198242E2,1.4149996E0,9.656345E1,5.808948E0,6.686864E1,2.2511377E2,2.4018614E0,9.416159E1,4.6344604E0,1.1744874E0,1.4589546E1,5.2279087E1,1.6493647E1,2.0862013E2,1.3706071E0,1.0312543E0,1.9012693E1,7.51489E1,3.614193E0,1.0202676E0,7.532767E0,7.05678E0,8.430578E0,4.384851E1,9.703568E0,6.7900777E0,1.9541959E2,1.3200539E1,1.6045305E1,2.967387E0,1.
6552364E1,5.859654E1,1.6096735E0,2.0045195E0,5.2988577E0,2.233909E0,3.5722938E0,3.4844863E0,2.0162337E0,6.4143443E0,5.559036E0,3.8289474E1,1.2247554E0,8.478813E0,3.0495224E0,3.7405553E0,1.8508461E2,1.0334978E1,6.20198E0,6.9985585E0,4.0078154E0,1.2037491E1,1.8988168E0,1.0685701E0,1.4993792E1,1.5585725E0,7.1662765E0,5.143026E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"61","size_leaf_vector":"1"}},{"base_weights":[-6.3206283E-3,4.6789248E-2,-2.8416075E-2,2.3748204E-2,5.067954E-1,-4.3910455E-2,1.2018957E-1,3.5859387E-2,-1.840813E-1,7.7960396E-1,-4.624352E-2,-2.962061E-1,-2.8936509E-2,5.6272984E-1,-7.867681E-3,-3.3277269E-3,2.2073476E-1,3.7748355E-2,2.9598904E-1,-7.131024E-1,2.7394572E-1,-1.7562704E-2,-6.4402634E-1,-2.7315432E-2,7.670946E-1,-5.5920064E-1,1.3764656E-1,-3.336082E-2,5.3854704E-1,-1.8794566E-1,3.3721095E-1,-2.6441655E-1,-1.2828833E-1,-1.0591358E-1,5.5088407E-1,2.733541E-1,-2.7455531E-2,-2.2375266E-1,-4.6173837E-2,6.1083943E-2,3.2074517E-1,1.7246848E-2,-2.3608004E-1,2.5213173E-1,-1.9626343E-1,5.0253407E-3,-1.0873582E-1,2.405485E-1,-5.131682E-2,2.6362887E-1,3.7412304E-2,-8.210986E-2,2.502612E-2,2.2404525E-1,-5.873218E-3,1.9365408E-1,-9.247921E-3,-2.3192053E-1,-5.5755544E-3,2.2819078E-2,2.6363558E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":46,"left_children":[1,3,5,7,9,11,13,15,-1,17,-1,19,21,23,25,27,29,-1,-1,31,33,35,37,-1,39,41,43,45,47,-1,49,-1,51,-1,53,55,57,-1,-1,-1,-1,-1,-1,59,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[4.610748E-1,1.2119675E0,6.437628E-1,8.790031E-1,1.1045369E0,9.4286346E-1,1.5033524E0,7.969146E-1,0E0,4.212227E-1,0E0,3.5234256E0,1.6596154E0,8.648013E-1,1.8018482E0,1.5096978E0,2.1094146E0,0E0,0E0,7.102051E-1,1.3467871E0,6.853723E-1,1.0019529E-1,0E0,5.718787E-1,7.035012E-1,1.7797916E0,1.4538649E0,1.0473485E0,0E0,1.9113028E0,0E0,1.16406605E-1,0E0,5.7277226E-1,9.0711975E-1,1.4982033E0,0E0,0E0,0E0,0E0,0E0,0E0,1.7299027E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,9,9,11,11,12,12,13,13,14,14,15,15,16,16,19,19,20,20,21,21,22,22,24,24,25,25,26,26,27,27,28,28,30,30,32,32,34,34,35,35,36,36,43,43],"right_children":[2,4,6,8,10,12,14,16,-1,18,-1,20,22,24,26,28,30,-1,-1,32,34,36,38,-1,40,42,44,46,48,-1,50,-1,52,-1,54,56,58,-1,-1,-1,-1,-1,-1,60,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.0994488E0,-1.1361345E0,7.4123514E-1,1.1321826E-1,-4.925415E-1,-7.476134E-1,1.678759E-1,-4.0061373E-1,-1.840813E-1,-7.8681755E-1,-4.624352E-2,1.3080368E0,1.0346863E0,-3.9908516E-1,5.963377E-1,1.1105256E0,-9.858561E-1,3.7748355E-2,2.9598904E-1,1.1339923E0,-4.7465166E-1,-1.7471503E0,1.10681206E-1,-2.7315432E-2,3.9471725E-1,3.9466032E-1,1.066704E0,-4.862524E-1,8.3089806E-2,-1.8794566E-1,-1.0002563E0,-2.6441655E-1,-1.5734528E0,-1.0591358E-1,1.9762269E0,1.4119537E-1,-1.711763E0,-2.2375266E-1,-4.6173837E-2,6.1083943E-2,3.2074517E-1,1.7246848E-2,-2.3608004E-1,7.5571007E-1,-1.9626343E-1,5.0253407E-3,-1.0873582E-1,2.405485E-1,-5.131682E-2,2.6362887E-1,3.7412304E-2,-8.210986E-2,2.502612E-2,2.2404525E-1,-5.873218E-3,1.9365408E-1,-9.247921E-3,-2.3192053E-1,-5.5755544E-3,2.2819078E-2,2.6363558E-1],"split_indices":[7,7,4,4,3,2,7,4,0,3,0,1,1,7,9,2,8,0,0,9,0,6,6,0,9,7,8,4,5,0,1,0,2,0,1,7,6,0,0,0,0,0,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.9077722E2,1.14482635E2,2.762946E2,1.10023346E2,4.459285E0,2.5084476E2,2.5449831E1,1.0889777E2,1.1255748E0,3.029356E0,1.4299287E0,1.3001493E1,2.3784326E2,4.949834E0,2.0499996E1,9.066249E1,1.823528E1,1.1152501E0,1.9141059E0,7.3652687E0,5.636225E0,2.3453496E2,3.308301E0,1.3108319E0,3.639002E0,3.687114E0,1.6812883E1,8.679061E1,3.8718846E0,1.6747668E0,1.6560513E1,5.3256373E0,2.039631E0,1.6425843E0,3.993641E0,6.8373895E0,2.2769757E2,2.3072255E0,1.0010755E0,1.8541673E0,1.7848347E0,1.1959882E0,2.4911258E0,1.5284655E1,1.5282285E0,7.614487E1,1.0645741E1,2.7292821E0,1.1426026E0,3.7834148E0,1.2777099E1,1.0355538E0,1.0040772E0,2.742532E0,1.251109E0,2.5704052E0,4.2669845E0,1.6641314E0,2.2603343E2,1.2807552E1,2.4771023E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"61","size_leaf_vector":"1"}},{"base_weights":[-5.169426E-3,-1.5812182E-1,2.8549766E-3,-4.6749207E-1,3.487864E-2,1.835958E-1,-9.358328E-3,4.6728488E-2,-8.427892E-1,-4.5584106E-1,1.8424226E-1,-4.5189008E-1,3.037544E-1,8.056181E-3,-1.4822933E-1,3.5951003E-1,-1.3412943E-1,-2.9590228E-1,-7.0111185E-2,-2.400487E-4,-2.0228441E-1,5.4690385E-1,-1.7342559E-1,-2.1762253E-1,1.2325651E-2,5.9637034E-1,-1.3402086E-1,-1.4614097E-3,4.0835717E-1,-4.391411E-1,3.3228327E-2,1.2296957E-2,1.5186608E-1,2.5045055E-1,9.732482E-2,1.3956779E-1,-5.0287753E-1,7.701025E-1,-4.383325E-2,-6.933857E-1,2.5521228E-1,2.197824E-2,-1.6094325E-1,-1.13098145E-1,7.115451E-1,3.949753E-2,-8.694655E-1,1.3024688E-1,-2.234217E-1,-1.052841E-1,1.403273E-1,-2.4474476E-1,6.86168E-2,3.354171E-1,1.0020847E-1,-1.9695741E-1,1.541005E-1,-2.9311776E-1,-9.065516E-3,1.5376136E-1,-6.5809876E-2,2.4746372E-3,1.2731473E-1,-2.4861738E-2,-2.3525791E-1,-1.2360788E-2,2.8134978E-1,9.275906E-2,-2.1208255E-1,-3.0371904E-1,-2.8325417E-3,7.623696E-2,-9.262752E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":47,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,-1,-1,-1,-1,-1,33,35,-1,-1,37,39,41,43,45,47,-1,-1,-1,49,-1,51,53,55,57,59,61,63,-1,65,67,69,71,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[4.774918E-1,1.1742018E0,8.1909454E-1,1.4905578E0,1.0275091E0,1.9306351E0,8.4229374E-1,7.97122E-1,1.5416312E-1,3.266554E-1,1.4673499E0,5.8757466E-1,2.6866875E0,1.1816297E0,2.0630686E0,1.230253E-1,0E0,0E0,0E0,0E0,0E0,6.300198E-1,1.5145776E0,0E0,0E0,1.410747E0,2.129738E0,1.1391559E0,1.9832885E0,3.1192067E0,1.9709746E0,0E0,0E0,0E0,7.083401E-1,0E0,1.2158815E0,1.1122632E0,1.5630492E0,7.1342397E-1,8.475789E-1,1.4646568E0,1.834977E0,0E0,9.828081E-1,1.8385863E0,9.6524715E-1,1.3252603E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,21,21,22,22,25,25,26,26,27,27,28,28,29,29,30,30,34,34,36,36,37,37,38,38,39,39,40,40,41,41,42,42,44,44,45,45,46,46,47,47],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,-1,-1,-1,-1,-1,34,36,-1,-1,38,40,42,44,46,48,-1,-1,-1,50,-1,52,54,56,58,60,62,64,-1,66,68,70,72,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.3255169E0,-7.0273864E-1,-1.4286649E0,5.599659E-1,3.9471725E-1,-3.9281076E-1,6.824
5757E-1,5.183576E-1,-1.6763757E0,9.324553E-3,1.5389027E-1,5.414116E-1,6.3984674E-1,6.0531694E-1,-1.0559944E-1,1.703107E-1,-1.3412943E-1,-2.9590228E-1,-7.0111185E-2,-2.400487E-4,-2.0228441E-1,5.414116E-1,-1.8002312E0,-2.1762253E-1,1.2325651E-2,7.5571007E-1,1.21692E0,8.65165E-1,-9.226904E-1,-7.657978E-1,1.1694285E0,1.2296957E-2,1.5186608E-1,2.5045055E-1,1.201199E0,1.3956779E-1,1.0346863E0,3.2795903E-1,1.1339923E0,-1.5879076E0,1.4398967E0,7.3075134E-1,1.0417056E0,-1.13098145E-1,-5.9413753E-2,-4.0061373E-1,3.3612394E-1,1.4718864E0,-2.234217E-1,-1.052841E-1,1.403273E-1,-2.4474476E-1,6.86168E-2,3.354171E-1,1.0020847E-1,-1.9695741E-1,1.541005E-1,-2.9311776E-1,-9.065516E-3,1.5376136E-1,-6.5809876E-2,2.4746372E-3,1.2731473E-1,-2.4861738E-2,-2.3525791E-1,-1.2360788E-2,2.8134978E-1,9.275906E-2,-2.1208255E-1,-3.0371904E-1,-2.8325417E-3,7.623696E-2,-9.262752E-2],"split_indices":[5,3,6,2,9,1,0,1,6,8,1,4,1,0,7,4,0,0,0,0,0,4,6,0,0,8,1,9,7,3,4,0,0,0,9,0,1,7,9,6,8,9,2,0,2,4,1,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.8767453E2,1.8392456E1,3.6928207E2,6.5196104E0,1.1872847E1,2.2486027E1,3.4679605E2,3.1200724E0,3.399538E0,2.2917233E0,9.581123E0,3.136604E0,1.9349422E1,3.0899045E2,3.78056E1,2.07671E0,1.0433623E0,2.249893E0,1.149645E0,1.0688422E0,1.2228811E0,4.494643E0,5.08648E0,1.7140858E0,1.4225183E0,1.1380344E1,7.969078E0,3.027877E2,6.2027564E0,1.3977264E1,2.3828337E1,1.0583458E0,1.0183643E0,2.2175162E0,2.277127E0,1.5907152E0,3.495765E0,8.79165E0,2.5886943E0,2.9492755E0,5.019803E0,2.6474002E2,3.8047653E1,1.6598351E0,4.5429215E0,7.047134E0,6.93013E0,2.192651E1,1.9018254E0,1.0541146E0,1.2230126E0,2.3678272E0,1.1279377E0,4.0200443E0,4.7716055E0,1.1486806E0,1.4400138E0,1.7341579E0,1.2151176E0,3.203058E0,1.8167448E0,2.569923E2,7.7477083E0,3.4819046E1,3.2286072E0,1.2390679E0,3.3038535E0,5.6070104E0,1.4401232E0,5.7905917E0,1.1395388E0,1.7429483E1,4.4970264E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"73","size_leaf_vector":"1"}},{"base_weights":[-7.1288715E-3,-4.917619E-2,2.3555852E-2,-1.2134467E-1,1.0355348E-1,6.09481E-2,-8.683824E-2,1.4163661E-1,-1.9675267E-1,-8.857036E-2,3.922204E-1,4.9881583E-1,3.237486E-2,7.044544E-2,-3.0253989E-1,-1.540177E-1,5.392114E-1,2.8195766E-1,-2.481988E-1,-1.6674428E-1,6.781991E-1,6.8433696E-1,-2.2543696E-1,-6.3536856E-3,8.3185863E-1,-4.2167357E-1,6.48542E-2,3.3415267E-1,-2.1967378E-1,-8.580918E-1,-1.5464558E-2,4.0288147E-1,-3.6719194E-1,6.876292E-1,-8.911208E-2,-4.2935258E-1,6.114728E-1,-5.239325E-1,-1.387532E-1,-2.603627E-1,-8.489234E-2,-1.0925113E-3,2.9898778E-1,-1.04224645E-1,8.358033E-1,-2.1996914E-1,1.1478111E-1,1.6982958E-1,-3.5379946E-1,5.439215E-2,9.3514705E-1,-5.933272E-1,1.4330667E-1,-6.5535925E-2,1.3996486E-1,6.5963435E-1,-1.0369378E-1,-5.0769997E-1,5.539155E-1,-3.3551687E-1,-4.317593E-1,1.2901981E-1,-1.7753527E-1,-4.9666837E-2,3.195446E-1,1.8854162E-1,-1.6371924E-1,-2.7601078E-2,2.673998E-1,7.41013E-3,-2.0621267E-1,-4.7858395E-2,2.4430539E-1,-2.3154113E-1,-7.2799146E-2,-5.4862957E-2,1.2050291E-1,3.8687926E-2,-1.0797877E-1,-8.915564E-2,2.935192E-1,1.837892E-1,-6.731061E-2,5.4249227E-2,-2.1583508E-1,3.2635507E-1,7.581996E-2,-2.340274E-1,7.344536E-2,-9.433273E-2,1.851198E-2,-1.7060116E-2,7.222949E-2,5.969466E-2,3.0433604E-1,1.4929102E-1,-1.8066347E-1,5.9774484E-2,-2.2774017E-1,3.372952E-1,-1.1203378E-1,-2.0303984E-1,-1.7738653E-2,9.78285E-2,-1.1548315E-1],"categories":[
],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":48,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,41,43,45,47,49,51,53,55,57,59,61,63,65,67,-1,69,71,73,75,-1,77,-1,-1,-1,79,-1,81,-1,83,-1,85,87,-1,89,91,93,95,97,99,-1,101,103,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[4.9799237E-1,1.8068163E0,9.270311E-1,2.229258E0,2.9616864E0,2.0780673E0,1.9520509E0,3.059154E0,2.204946E0,2.0613317E0,3.9926088E0,1.7187276E0,2.3603776E0,2.6534085E0,3.8442066E0,1.9641228E0,1.5720477E0,2.3440022E0,2.3055806E0,1.6650981E0,6.9937193E-1,2.5861626E0,1.394178E0,1.2025045E0,2.4338865E-1,1.9344201E0,1.4636062E0,2.6424465E0,3.9934292E0,4.295416E-1,1.4890212E0,1.9501401E0,2.4026837E0,1.6163034E0,0E0,4.1518623E-1,1.1437435E0,1.3487935E0,1.4511602E0,0E0,1.7073648E0,0E0,0E0,0E0,2.4077644E0,0E0,1.050115E0,0E0,8.633941E-1,0E0,2.5265026E-1,1.6665504E0,0E0,1.7600868E0,1.8895613E0,1.513742E0,2.8674855E0,2.4019656E0,3.0638683E0,0E0,3.9330918E-1,1.5826243E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,33,33,35,35,36,36,37,37,38,38,40,40,44,44,46,46,48,48,50,50,51,51,53,53,54,54,55,55,56,56,57,57,58,58,60,60,61,61],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,-1,70,72,74,76,-1,78,-1,-1,-1,80,-1,82,-1,84,-1,86,88,-1,90,92,94,96,98,100,-1,102,104,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-6.6069925E-1,1.9577834E-1,6.465325E-1,-8.4839493E-1,-6.35789E-1,-1.2854533E0,-8.894858E-2,-9.6988404E-1,-9.858561E-1,8.367388E-1,3.760857E-1,-5.9413753E-2,-1.0917766E0,-4.2915997E-1,-4.2915997E-1,-6.805919E-1,-2.293364E-1,9.406206E-3,-1.1507303E-1,-1.1102537E0,-6.7486215E-1,-9.236573E-1,5.056718E-1,-6.289329E-1,-1.4834465E0,9.157797E-1,-4.26067E-1,7.8226164E-2,-2.432298E-1,5.884093E-1,-3.9281076E-1,-5.4334486E-1,-1.6330593E0,-8.6145854E-1,-8.911208E-2,-5.0859594E-1,-8.747628E-1,-3.670469E-1,2.0748878E0,-2.603627E-1,-8.4839493E-1,-1.0925113E-3,2.9898778E-1,-1.04224645E-1,-7.2091955E-1,-2.1996914E-1,-5.1429987E-1,1.6982958E-1,-9.236573E-1,5.439215E-2,-6.7486215E-1,2.1586609E-1,1.4330667E-1,-6.805919E-1,-4.5355532E-1,1.9592093E-1,4.6804446E-1,-6.7486215E-1,-6.35789E-1,-3.3551687E-1,-3.9281076E-1,9.324553E-3,-1.7753527E-1,-4.9666837E-2,3.195446E-1,1.8854162E-1,-1.6371924E-1,-2.7601078E-2,2.673998E-1,7.41013E-3,-2.0621267E-1,-4.7858395E-2,2.4430539E-1,-2.3154113E-1,-7.2799146E-2,-5.4862957E-2,1.2050291E-1,3.8687926E-2,-1.0797877E-1,-8.915564E-2,2.935192E-1,1.837892E-1,-6.731061E-2,5.4249227E-2,-2.1583508E-1,3.2635507E-1,7.581996E-2,-2.340274E-1,7.344536E-2,-9.433273E-2,1.851198E-2,-1.7060116E-2,7.222949E-2,5.969466E-2,3.0433604E-1,1.4929102E-1,-1.8066347E-1,5.9774484E-2,-2.2774017E-1,3.372952E-1,-1.1203378E-1,-2.0303984E-1,-1.7738653E-2,9.78285E-2,-1.1548315E-1],"split_indices":[
3,5,5,1,1,9,0,1,8,0,6,2,0,4,4,9,4,2,8,8,8,8,5,0,8,3,8,2,0,6,1,4,7,8,0,6,5,6,9,0,1,0,0,0,6,0,1,0,8,0,8,2,0,9,1,8,10,8,1,0,1,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.8407983E2,1.6178416E2,2.2229568E2,1.1000814E2,5.1776024E1,1.6638599E2,5.59097E1,2.4318928E1,8.568921E1,3.1502523E1,2.02735E1,9.184322E0,1.5720166E2,3.2722656E1,2.3187042E1,1.429852E1,1.0020409E1,7.8804436E0,7.780877E1,2.9298056E1,2.204468E0,1.3690651E1,6.582849E0,4.038907E0,5.1454153E0,9.694434E0,1.4750723E2,1.7062092E1,1.5660563E1,7.2219496E0,1.5965092E1,3.7118137E0,1.0586706E1,8.660983E0,1.3594251E0,2.3989499E0,5.4814935E0,2.103265E1,5.677612E1,2.0588927E0,2.7239162E1,1.0164746E0,1.1879933E0,1.5869621E0,1.2103689E1,2.177555E0,4.405294E0,1.287804E0,2.751103E0,1.0825342E0,4.0628815E0,8.426433E0,1.2680012E0,5.3960575E1,9.354665E1,9.496295E0,7.5657973E0,1.1662164E1,3.9983997E0,3.476245E0,3.7457047E0,1.3385464E1,2.5796285E0,2.4010012E0,1.3108125E0,1.2265384E0,9.3601675E0,1.9077618E0,6.7532215E0,1.1969322E0,1.2020178E0,1.1864177E0,4.295076E0,1.0253035E1,1.0779614E1,5.3102203E1,3.673915E0,1.5622874E1,1.1616288E1,1.2318467E0,1.0871842E1,1.4582734E0,2.9470208E0,1.3226569E0,1.428446E0,2.8344893E0,1.228392E0,6.9475694E0,1.4788638E0,1.7755941E1,3.6204636E1,3.1829388E1,6.1717262E1,4.8110437E0,4.685251E0,3.4300778E0,4.1357193E0,3.114049E0,8.548115E0,2.3441663E0,1.6542336E0,1.7672765E0,1.9784281E0,9.939696E0,3.4457667E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"105","size_leaf_vector":"1"}},{"base_weights":[-5.9624403E-3,1.1805331E-1,-1.5165019E-2,-1.0982592E-1,3.650318E-1,-7.5425364E-2,1.3851445E-2,3.044163E-1,-4.7962242E-1,-6.042075E-2,6.821568E-1,-2.4303053E-1,2.451912E-2,3.7504897E-1,-1.1090967E-2,6.565471E-1,-2.0418684E-1,-6.9879365E-1,7.079437E-2,1.3209204E-1,-3.6076126E-1,3.245963E-1,2.662741E-1,-4.8999682E-2,-5.1004183E-1,3.0157995E-1,-3.853876E-2,7.6121074E-1,-2.2552399E-2,-5.477658E-1,-4.1814084E-4,5.673642E-2,2.664555E-1,-2.0667304E-1,7.0046395E-2,-3.0602458E-1,-1.7278245E-1,-2.2790405E-1,3.0411318E-2,-1.396877E-1,2.1467218E-1,2.9849714E-1,-3.3551338E-1,-2.4317288E-1,-8.859753E-1,-9.719801E-2,7.8596175E-1,8.388975E-2,-4.4883403E-1,-9.103943E-2,1.1054364E0,3.17904E-1,-5.3757817E-1,-2.1771184E-1,-3.349596E-2,2.3424779E-1,-1.0482767E-2,-1.5660214E-1,1.1011841E-1,-1.7253396E-1,1.3213566E-1,-1.7910662E-1,1.3486144E-1,-2.0528191E-1,4.5407705E-2,-1.9112603E-1,3.3293396E-2,-7.460771E-2,-3.125907E-1,-8.3803475E-2,1.8418698E-1,3.0832773E-1,-3.814321E-3,8.125654E-2,-3.287901E-2,-2.0336778E-1,4.0412985E-2,4.4438817E-2,3.9044902E-1,-1.5538009E-1,2.0518978E-1,-3.0832303E-1,9.31852E-2,-8.131215E-2,4.2999513E-3],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":49,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,-1,-1,37,-1,39,41,43,45,47,49,51,53,55,-1,-1,-1,-1,-1,57,-1,-1,59,-1,61,63,65,67,69,71,73,75,-1,77,79,81,-1,-1,-1,83,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[4.319606E-1,1.5119215E0,6.1613446E-1,2.347326E0,1.7656556E0,1.9237595E0,2.15
09075E0,1.4574933E0,1.4165984E0,1.1178601E0,9.430752E-1,2.1954613E0,1.2805028E0,2.3848226E0,1.2820333E0,3.3048224E-1,9.640335E-1,8.989315E-1,0E0,0E0,9.131794E-1,0E0,1.0038602E0,2.6815994E0,1.5834832E0,2.6992426E0,3.0540628E0,3.2129874E0,1.6734174E0,2.576604E-1,1.741966E0,0E0,0E0,0E0,0E0,0E0,8.733834E-1,0E0,0E0,1.1222941E0,0E0,1.9248047E0,2.5965474E0,1.7259269E0,4.4331884E-1,1.2514759E0,1.2183669E0,1.7332466E0,1.9570086E0,0E0,9.6125555E-1,2.1249423E0,1.8915447E0,0E0,0E0,0E0,1.4178848E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,20,20,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,36,36,39,39,41,41,42,42,43,43,44,44,45,45,46,46,47,47,48,48,50,50,51,51,52,52,56,56],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,-1,-1,38,-1,40,42,44,46,48,50,52,54,56,-1,-1,-1,-1,-1,58,-1,-1,60,-1,62,64,66,68,70,72,74,76,-1,78,80,82,-1,-1,-1,84,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.6897553E0,-6.289835E-1,-3.8474053E-1,-1.4228727E-1,-1.7998126E0,1.2588996E-1,-2.9627237E-1,9.406206E-3,5.6201607E-1,-4.9090233E-1,-7.447781E-1,-4.2400864E-1,-4.7465166E-1,9.406206E-3,-1.0719677E0,-3.3469358E-1,-8.2178444E-1,1.8934776E0,7.079437E-2,1.3209204E-1,8.3089806E-2,3.245963E-1,1.8934776E0,-6.133851E-1,-4.5355532E-1,-1.2823372E-1,-1.039405E0,-6.575297E-1,7.1523346E-2,4.2232597E-1,-9.874513E-1,5.673642E-2,2.664555E-1,-2.0667304E-1,7.0046395E-2,-3.0602458E-1,-6.133851E-1,-2.2790405E-1,3.0411318E-2,-1.7397814E0,2.1467218E-1,-1.2128284E0,-4.2915997E-1,9.406206E-3,-7.146222E-1,7.126889E-1,-2.1057709E-1,2.753794E-1,-5.5560064E-1,-9.103943E-2,-3.3469358E-1,-8.6145854E-1,8.8010764E-1,-2.1771184E-1,-3.349596E-2,2.3424779E-1,-5.3669715E-1,-1.5660214E-1,1.1011841E-1,-1.7253396E-1,1.3213566E-1,-1.7910662E-1,1.3486144E-1,-2.0528191E-1,4.5407705E-2,-1.9112603E-1,3.3293396E-2,-7.460771E-2,-3.125907E-1,-8.3803475E-2,1.8418698E-1,3.0832773E-1,-3.814321E-3,8.125654E-2,-3.287901E-2,-2.0336778E-1,4.0412985E-2,4.4438817E-2,3.9044902E-1,-1.5538009E-1,2.0518978E-1,-3.0832303E-1,9.31852E-2,-8.131215E-2,4.2999513E-3],"split_indices":[7,4,6,5,7,9,6,2,5,6,3,5,0,2,5,2,0,10,0,0,5,0,10,9,1,2,6,4,8,2,5,0,0,0,0,0,9,0,0,7,0,5,4,2,4,4,1,1,3,0,2,8,8,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.7592426E2,2.5151356E1,3.5077292E2,1.3370314E1,1.1781042E1,1.1348726E2,2.3728564E2,6.3895764E0,6.980737E0,5.3769026E0,6.4041395E0,4.1858917E1,7.162834E1,1.4420601E1,2.2286504E2,3.6036856E0,2.7858906E0,5.3619876E0,1.6187499E0,1.8412111E0,3.5356915E0,2.4484897E0,3.9556499E0,2.4927906E1,1.6931013E1,1.2578577E1,5.9049767E1,6.851612E0,7.568989E0,3.364419E0,2.1950063E2,1.7959871E0,1.8076986E0,1.049524E0,1.7363667E0,2.745658E0,2.6163294E0,1.5520673E0,1.9836242E0,2.4400544E0,1.5155954E0,1.124619E1,1.3681715E1,1.0864988E1,6.066025E0,7.337311E0,5.2412663E0,4.609189E1,1.2957877E1,1.7029312E0,5.1486807E0,4.787179E0,2.78181E0,1.9178673E0,1.4465517E0,1.8177867E0,2.1768283E2,1.6087018E0,1.0076277E0,1.3988262E0,1.0412282E0,1.1969253E0,1.0049265E1,7.735281E0,5.9464345E0,4.765509E0,6.0994797E0,1.7029965E0,4.363028E0,6.324597E0,1.0127139E0,3.803068E0,1.4381983E0,2.3237598E1,2.285429E1,9.18917E0,3.76870
7E0,1.1736428E0,3.975038E0,1.3316689E0,3.45551E0,1.6288366E0,1.1529734E0,1.8066765E1,1.9961607E2],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"85","size_leaf_vector":"1"}},{"base_weights":[-5.1899804E-3,1.489758E-2,-7.448265E-2,1.8985309E-3,2.632785E-1,-1.9253571E-1,-5.710377E-2,-1.3014614E-2,1.8263406E-1,3.932339E-1,-1.2870094E-1,3.6014E-1,-8.437098E-2,1.5329969E-2,-1.7971128E-1,2.9022744E-1,-2.029849E-1,-1.15473025E-1,5.930803E-1,-2.0823777E-2,1.8071955E-1,-5.542288E-2,-2.2263266E-1,-2.8378753E-2,1.368725E-1,3.9390447E-3,-4.5662305E-1,3.6231658E-1,-1.5448415E-1,-1.5245262E-1,8.2279E-2,1.0198009E0,3.197496E-1,-1.3971063E-1,8.929756E-2,1.4416362E-2,-3.740474E-2,9.020952E-2,-2.4993088E-2,-3.0382695E-2,1.8042448E-1,-2.5109085E-1,-4.0105317E-2,1.311809E-1,-6.29445E-2,3.7032112E-1,9.8459795E-2,-1.0051026E-1,2.472225E-1,-7.3788464E-2,5.7167638E-2,7.869208E-2,-1.0812784E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":50,"left_children":[1,3,5,7,9,-1,11,13,15,17,-1,19,21,23,25,27,-1,29,31,-1,-1,33,-1,35,37,39,41,43,-1,-1,-1,45,47,49,51,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[5.1827914E-1,9.3221456E-1,7.8826237E-1,7.4499947E-1,1.4605589E0,0E0,9.6649945E-1,1.2097595E0,2.1440926E0,1.3506306E0,0E0,5.8679867E-1,1.4448421E0,1.1686478E0,1.8937378E0,1.3139814E0,0E0,8.1189865E-1,7.4901676E-1,0E0,0E0,9.375156E-1,0E0,1.1990874E0,2.1230965E0,1.5293596E0,1.7112806E0,8.813982E-1,0E0,0E0,0E0,6.1231613E-2,2.612551E0,1.7422435E0,2.3136847E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,6,6,7,7,8,8,9,9,11,11,12,12,13,13,14,14,15,15,17,17,18,18,21,21,23,23,24,24,25,25,26,26,27,27,31,31,32,32,33,33,34,34],"right_children":[2,4,6,8,10,-1,12,14,16,18,-1,20,22,24,26,28,-1,30,32,-1,-1,34,-1,36,38,40,42,44,-1,-1,-1,46,48,50,52,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.345772E-1,1.2452005E-1,-1.1510396E0,1.1105256E0,1.0972867E0,-1.9253571E-1,-1.0610008E0,2.9675448E-1,1.1339923E0,-5.3669715E-1,-1.2870094E-1,5.6201607E-1,1.6889015E0,-3.435213E-1,-4.0061373E-1,6.3984674E-1,-2.029849E-1,9.995786E-1,-5.461783E-1,-2.0823777E-2,1.8071955E-1,4.822475E-1,-2.2263266E-1,8.3089806E-2,1.2588996E-1,3.3612394E-1,2.8468606E-1,9.138794E-1,-1.5448415E-1,-1.5245262E-1,8.2279E-2,-1.9705367E-1,1.3264844E-1,1.4546254E0,1.8931966E0,1.4416362E-2,-3.740474E-2,9.020952E-2,-2.4993088E-2,-3.0382695E-2,1.8042448E-1,-2.5109085E-1,-4.0105317E-2,1.311809E-1,-6.29445E-2,3.7032112E-1,9.8459795E-2,-1.0051026E-1,2.472225E-1,-7.3788464E-2,5.7167638E-2,7.869208E-2,-1.0812784E-1],"split_indices":[7,7,9,2,5,0,1,0,9,5,0,5,5,4,4,1,0,9,9,0,0,6,0,5,9,1,2,0,0,0,0,2,10,2,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.70551E2,2.8788092E2,8.2670074E1,2.7452142E2,1.3359472E1,1.3893132E0,8.128076E1,2.5445354E2,2.0067896E1,1.1611694E1,1.7477776E0,4.2370753E0,7.7043686E1,2.1825089E2,3.6202663E1,1.8423214E1,1.6446817E0,3.3941398E0,8.217555E0,1.7851945E0,2.4518807E0,7.4834366E1,2.209318E0,1.610876E2,5.7163277E1,2.2359451E1,1.3843211E1,1.7414104E1,1.0091097E0,1.5392652E0,1.8548746E0,2.1421857E0,6.0753694E0,4.7313034E1,2.7521334E1,9.0088295E1,7.0999306E1,3.256659E1,2.4596685E1,1.9717688E1,2.6417637E0,5.6257086E0,8.217503E0,1.560541E1
,1.808695E0,1.0358999E0,1.1062858E0,2.7894304E0,3.285939E0,3.5989975E1,1.1323058E1,2.017624E1,7.3450913E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"53","size_leaf_vector":"1"}},{"base_weights":[-5.243482E-3,3.2132745E-3,-1.521584E-1,-1.03469E-3,1.9263865E-1,2.4388207E-1,-3.0028847E-1,-4.6531945E-3,1.7129005E-1,-1.0827614E-1,1.6353077E-1,-2.1965735E-2,-6.582232E-1,2.6286896E-3,-4.2541972E-1,-1.23590894E-1,9.253708E-2,2.5610974E-1,-3.4834942E-1,-9.304736E-3,-2.5880846E-1,5.265609E-2,-4.65503E-2,-2.449468E-1,9.68004E-2,-1.4100508E-1,2.2480442E-1,-2.1231492E-1,3.179781E-2,-1.3095588E-2,4.0111892E-2,5.4934863E-2,-3.1265277E-2,-2.295569E-1,1.6279814E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":51,"left_children":[1,3,5,7,-1,9,11,13,-1,15,-1,17,19,21,23,-1,-1,25,27,-1,-1,29,31,-1,-1,33,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[4.5919123E-1,9.5082325E-1,1.2533457E0,7.245372E-1,0E0,6.984167E-1,1.4886574E0,1.0660613E0,0E0,5.8435094E-1,0E0,9.5348305E-1,8.1526256E-1,8.454728E-1,1.9950633E0,0E0,0E0,1.2256866E0,8.59469E-1,0E0,0E0,1.3353904E0,2.3145914E0,0E0,0E0,2.1261694E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,5,5,6,6,7,7,9,9,11,11,12,12,13,13,14,14,17,17,18,18,21,21,22,22,25,25],"right_children":[2,4,6,8,-1,10,12,14,-1,16,-1,18,20,22,24,-1,-1,26,28,-1,-1,30,32,-1,-1,34,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.1722991E0,1.1546056E0,4.2115542E-1,2.618582E0,1.9263865E-1,1.3943407E-1,-3.638682E-1,1.0484438E0,1.7129005E-1,7.664258E-1,1.6353077E-1,-2.1014415E-1,-8.4839493E-1,7.8226164E-2,3.53506E-1,-1.23590894E-1,9.253708E-2,1.148385E0,1.1254588E0,-9.304736E-3,-2.5880846E-1,7.1523346E-2,-8.1500554E-1,-2.449468E-1,9.68004E-2,4.9114594E-1,2.2480442E-1,-2.1231492E-1,3.179781E-2,-1.3095588E-2,4.0111892E-2,5.4934863E-2,-3.1265277E-2,-2.295569E-1,1.6279814E-1],"split_indices":[6,6,5,5,0,5,8,6,0,2,0,9,1,2,2,0,0,7,5,0,0,8,9,0,0,2,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.6819962E2,3.4908368E2,1.9115923E1,3.4776984E2,1.3138404E0,5.027635E0,1.4088287E1,3.465693E2,1.200546E0,2.6131415E0,2.4144938E0,8.522632E0,5.5656557E0,3.4165036E2,4.9189286E0,1.5181922E0,1.0949495E0,4.718138E0,3.8044934E0,1.6514856E0,3.91417E0,1.6932924E2,1.7232114E2,3.1702282E0,1.7487005E0,3.0093672E0,1.7087708E0,1.812893E0,1.9916004E0,7.759192E1,9.173732E1,3.4147892E1,1.3817323E2,1.5107821E0,1.4985852E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"35","size_leaf_vector":"1"}},{"base_weights":[-4.0542963E-3,-9.870077E-3,1.6090927E-1,-2.6735392E-1,5.3505143E-3,5.857321E-1,-9.270173E-2,-4.86576E-1,1.0327427E-1,3.5919362E-1,-1.1333284E-3,8.572562E-1,-6.2211722E-2,-2.8101087E-1,1.414996E-1,-6.6724336E-1,-1.4355153E-1,4.439405E-1,-2.976075E-1,-2.339636E-2,5.712759E-1,5.393511E-2,-3.4065984E-2,7.243296E-2,3.0491102E-1,1.4172082E-1,-2.1137404E-1,-8.3096606E-1,5.4891083E-2,-4.1520712E-1,9.8176114E-2,2.537634E-1,-1.13609806E-1,-2.0157723E-1,4.8054602E-2,2.4952012E-1,1.50092365E-2,-9.31753E-4,2.3850656E-1,-1.8112196E-2,-5.787697E-1,1.7589895E-1,-1.8031745E-1,-5.2324906E-2,-2.799633E-1,-1.9625469E-1,2.5372389E-3,-9.591563E-3,3.3216663E-2,-5.2691832E-2,3.4043144E-2,2.1490532E-1,4.3465517E-2,-9.82879E-3,1.4520314E-1,-2.1812105E-1,-1.9149775E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":52,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,-1,25,-1,27,29,31,33,-1,35,37,39,-1,-1,41,-1,43,-1,45,-1,-1,-1,-1,47,-1,-1,49,51,53,55,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.5343352E-1,1.393444E0,1.4060891E0,1.6625798E0,7.720754E-1,1.2172232E0,1.0592593E0,6.878054E-1,1.2519832E0,6.060076E-1,6.0265094E-1,1.2602448E-1,0E0,1.3827888E0,0E0,1.2667267E0,8.567217E-1,1.9188008E0,5.971651E-1,0E0,5.2826035E-1,1.2596624E0,1.7964046E0,0E0,0E0,1.8116374E0,0E0,3.001027E-1,0E0,4.1200435E-1,0E0,0E0,0E0,0E0,1.9603383E-2,0E0,0E0,1.9560621E0,1.1713012E0,1.5094899E0,4.0294683E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,13,13,15,15,16,16,17,17,18,18,20,20,21,21,22,22,25,25,27,27,29,29,34,34,37,37,38,38,39,39,40,40],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,-1,26,-1,28,30,32,34,-1,36,38,40,-1,-1,42,-1,44,-1,46,-1,-1,-1,-1,48,-1,-1,50,52,54,56,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.6019539E0,-8.3161515E-1,-1.4369774E-1,-2.9512849E-2,-8.0366546E-1,8.7190956E-1,2.349285E0,-2.3947062E-1,2.1586609E-1,6.948162E-1,-2.3947062E-1,7.6534563E-1,-6.2211722E-2,-2.834341E-2,1.414996E-1,1.4907846E0,1.0130565E0,-1.0831622E0,4.9114594E-1,-2.339636E-2,8.015383E-1,-9.8929316E-2,2.3024104E0,7.243296E-2,3.0491102E-1,5.353284E-1,-2.1137404E-1,-1.9705367E-1,5.4891083E-2,1.4704613E-1,9.8176114E-2,2.537634E-1,-1.13609806E-1,-2.0157723E-1,1.6935092E0,2.4952012E-1,1.50092365E-2,6.4971733E-1,-3.2037044E-1,1.7624261E0,1.3372214E-1,1.7589895E-1,-1.8031745E-1,-5.2324906E-2,-2.799633E-1,-1.9625469E-1,2.5372389E-3,-9.591563E-3,3.3216663E-2,-5.2691832E-2,3.4043144E-2,2.1490532E-1,4.3465517E-2,-9.82879E-3,1.4520314E-1,-2.1812105E-1,-1.9149775E-2],"split_indices":[7,10,4,4,10,5,2,8,2,7,8,6,0,1,0,6,6,10,2,0,7,7,0,0,0,6,0,2,0,2,0,0,0,0,3,0,0,10,0,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.6573785E2,3.5419104E2,1.1546816E1,1.8844078E1,3.3534695E2,3.8268702E0,7.7199454E0,1.164397E1,7.200108E0,5.055484E0,3.3029147E2,2.7906036E0,1.0362666E0,6.164987E0,1.5549582E0,7.007838E0,4.636132E0,3.8343215E0,3.3657863E0,1.8579088E0,3.197575E0,1.2336633E2,2.0692513E2,1.0898846E0,1.700719E0,3.4185803E0,2.746407E0,5.895249E0,1.1125889E0,3.0147223E0,1.6214095E0,2.556823E0,1.2774986E0,1.1627609E0,2.2030256E0,1.7349708E0,1.4626043E0,9.586412E1,2.7502214E1,2.0204082E2,4.884308E0,2.2702763E0,1.148304E0,1.1589704E0,4.7362785E0,1.5796118E0,1.4351107E0,1.1826282E0,1.0203974E0,3.7723457E1,5.814066E1,3.415898E0,2.4086317E1,1.9722125E2,4.819574E0,3.4723275E0,1.4119804E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"57","size_leaf_vector":"1"}},{"base_weights":[-3.0455254E-3,-2.5281394E-1,9.191799E-4,4.5439836E-2,-5.09077E-1,3.0395937E-1,-3.2254613E-3,-2.3196144E-1,2.5261452E-2,1.9816524E-1,-6.0194943E-2,2.860109E-2,-3.7059084E-2,3.8346034E-2,-1.6714431E-1,-4.0821636E-1,-2.4091884E-2,-3.5094562E-1,5.2766193E-2,-2.3333646E-1,1.2602903E-1,-3.4276095E-1,-6.0343724E-3,-1.7540082E-1,8.504435E-2,7.129419E-2,1.9647703E-3,8.6765654E-2,-1.6344984E-1,3.8347058E-2,-1.0876731E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":53,"left_children":[1,3,5
,-1,7,9,11,-1,-1,-1,-1,13,15,17,-1,19,21,23,25,-1,-1,27,29,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.6044905E-1,6.663504E-1,4.5197323E-1,0E0,6.4652437E-1,1.0061861E0,3.8356084E-1,0E0,0E0,0E0,0E0,1.0693821E0,8.209404E-1,1.0375822E0,0E0,2.0913498E0,9.6380776E-1,1.1466011E0,1.5016036E0,0E0,0E0,1.3047982E0,6.5135807E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,5,5,6,6,11,11,12,12,13,13,15,15,16,16,17,17,18,18,21,21,22,22],"right_children":[2,4,6,-1,8,10,12,-1,-1,-1,-1,14,16,18,-1,20,22,24,26,-1,-1,28,30,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.4382055E0,1.9885693E-1,-1.8002312E0,4.5439836E-2,2.753794E-1,3.825173E-1,-6.525501E-1,-2.3196144E-1,2.5261452E-2,1.9816524E-1,-6.0194943E-2,1.1722991E0,-8.498767E-1,-9.155495E-1,-1.6714431E-1,-1.4369774E-1,-5.791787E-1,7.9795814E-1,4.6804446E-1,-2.3333646E-1,1.2602903E-1,-6.7486215E-1,-3.5239425E-1,-1.7540082E-1,8.504435E-2,7.129419E-2,1.9647703E-3,8.6765654E-2,-1.6344984E-1,3.8347058E-2,-1.0876731E-2],"split_indices":[5,4,6,0,1,8,7,0,0,0,0,6,3,6,0,4,7,9,10,0,0,8,7,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.627603E2,4.687557E0,3.5807275E2,1.9772285E0,2.7103286E0,3.8552306E0,3.5421753E2,1.6655583E0,1.0447702E0,2.077847E0,1.7773834E0,1.8260225E2,1.7161528E2,1.805333E2,2.0689483E0,4.764395E0,1.6685089E2,5.61488E0,1.7491841E2,3.336661E0,1.4277343E0,7.9833574E0,1.5886754E2,4.160555E0,1.4543256E0,3.415332E1,1.4076509E2,1.8297001E0,6.153657E0,2.866661E1,1.3020093E2],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"31","size_leaf_vector":"1"}},{"base_weights":[-3.6827112E-3,3.1225285E-1,-6.604045E-3,3.1236786E-2,1.185255E-1,6.605419E-2,-1.980284E-2,-2.4803786E-1,1.6606353E-1,-4.5862556E-1,-7.7546174E-3,-4.6516395E-1,3.532066E-1,2.8835922E-1,-7.397116E-2,8.742238E-2,-6.4883643E-1,4.7128467E-4,-4.2722413E-1,1.8266192E-1,-6.4794147E-1,-3.3267965E-3,4.8262265E-1,5.839342E-1,8.8423304E-2,-7.447856E-1,2.0816767E-1,-2.6809642E-1,9.937393E-2,-4.9391766E-3,4.4118947E-1,-2.4194477E-1,1.2373308E-1,1.405606E-1,-5.990484E-2,2.243268E-3,-2.5805584E-1,4.358841E-2,1.7671257E-1,-4.649412E-2,2.0165102E-1,-8.919883E-2,7.271732E-2,-3.875623E-2,-2.7716294E-1,-5.352727E-2,1.5688494E-1,-4.768922E-3,6.322971E-2,-4.8394393E-2,2.3887233E-1,1.2653182E-1,-6.1532706E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":54,"left_children":[1,3,5,-1,-1,7,9,11,13,15,17,19,21,23,25,-1,27,29,31,33,35,-1,37,39,41,43,45,-1,-1,47,49,-1,51,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.3664477E-1,1.6478717E-2,3.4671247E-1,0E0,0E0,1.7876961E0,1.6120095E0,1.9696008E0,1.2768775E0,1.3515507E0,1.0299144E0,1.3724678E0,2.0512778E-1,1.6348944E0,3.037909E0,0E0,1.9299881E0,7.0246136E-1,1.2993369E0,4.3357313E-1,1.186173E0,0E0,2.7463317E-2,8.4331775E-1,1.1531484E0,4.019184E-1,1.4932437E0,0E0,0E0,6.91396E-1,8.9272344E-1,0E0,4.008725E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,16,16,17,17,18,18,19,19,20,20,22,22,23,23,24,24,25,25,26,26,29,29,30,30,32,32],"right_children":[2,4,6,-1,-1,8,10,12,14,16,18,20,22,24,26,-1,28,30,32,34,36,-1,38,40,42,44,46,-1,-1,48,50,-1,52,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.3647237E0,1.9577834E-1,-1.039405E0,3
.1236786E-2,1.185255E-1,5.963377E-1,-8.44775E-1,-4.925415E-1,5.7910216E-1,-1.519667E0,1.2224419E0,-1.9175527E0,-1.3255169E0,2.7459797E-1,8.524527E-1,8.742238E-2,8.3488894E-1,1.0681607E0,-1.4266019E-3,3.4824587E-2,-1.8179249E0,-3.3267965E-3,1.1388365E0,-8.1802267E-1,6.414551E-1,6.3984674E-1,-1.5879076E0,-2.6809642E-1,9.937393E-2,6.094744E-1,1.4704613E-1,-2.4194477E-1,1.9577834E-1,1.405606E-1,-5.990484E-2,2.243268E-3,-2.5805584E-1,4.358841E-2,1.7671257E-1,-4.649412E-2,2.0165102E-1,-8.919883E-2,7.271732E-2,-3.875623E-2,-2.7716294E-1,-5.352727E-2,1.5688494E-1,-4.768922E-3,6.322971E-2,-4.8394393E-2,2.3887233E-1,1.2653182E-1,-6.1532706E-2],"split_indices":[1,5,6,0,0,9,6,3,1,7,0,2,5,7,1,0,7,0,5,10,6,0,10,1,7,1,6,0,0,1,2,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.6151306E2,2.342019E0,3.5917105E2,1.3092731E0,1.032746E0,5.459972E1,3.0457132E2,1.2828871E1,4.1770847E1,7.148311E0,2.9742303E2,9.591459E0,3.237411E0,2.7538755E1,1.4232093E1,1.3384703E0,5.8098407E0,2.926823E2,4.740697E0,2.1107771E0,7.4806824E0,1.0882857E0,2.1491253E0,1.0336782E1,1.7201973E1,3.7281775E0,1.0503915E1,4.7191997E0,1.0906409E0,2.901096E2,2.5727274E0,2.5331717E0,2.2075253E0,1.0787704E0,1.0320067E0,2.0658946E0,5.414788E0,1.0826484E0,1.066477E0,1.0219795E0,9.314803E0,4.641638E0,1.2560334E1,1.2281035E0,2.5000741E0,4.908654E0,5.595261E0,2.7696674E2,1.3142863E1,1.156265E0,1.4164625E0,1.0097544E0,1.1977708E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"53","size_leaf_vector":"1"}},{"base_weights":[-3.21512E-3,-2.8754896E-1,-7.151371E-5,-1.5782855E-1,5.685176E-2,3.791667E-3,-2.685306E-1,-7.126082E-4,1.5726028E-1,4.8324052E-2,-5.5298465E-1,1.2064295E-2,-9.010037E-2,-4.7391623E-2,-1.99861E-1,2.094876E-3,5.9696805E-1,-1.9435078E-1,-2.3836056E-2,2.8809821E-3,-1.0539091E-1,-7.4093446E-2,2.600513E-1,-1.0933562E-1,1.6683921E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":55,"left_children":[1,3,5,-1,-1,7,9,11,-1,-1,13,15,17,-1,-1,19,21,-1,23,-1,-1,-1,-1,-1,-1],"loss_changes":[3.209929E-1,5.4659176E-1,3.7045112E-1,0E0,0E0,8.2583183E-1,7.0831156E-1,4.0096423E-1,0E0,0E0,4.1190982E-2,1.7901927E0,1.6072109E0,0E0,0E0,8.07567E-1,1.3859236E0,0E0,1.1103418E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,5,5,6,6,7,7,10,10,11,11,12,12,15,15,16,16,18,18],"right_children":[2,4,6,-1,-1,8,10,12,-1,-1,14,16,18,-1,-1,20,22,-1,24,-1,-1,-1,-1,-1,-1],"split_conditions":[-2.1240125E0,1.7677041E0,2.4972763E0,-1.5782855E-1,5.685176E-2,2.248481E0,-1.3231984E0,7.6534563E-1,1.5726028E-1,4.8324052E-2,3.2751042E-1,7.2995836E-1,8.1842655E-1,-4.7391623E-2,-1.99861E-1,6.768775E-1,-7.269058E-1,-1.9435078E-1,8.3089806E-2,2.8809821E-3,-1.0539091E-1,-7.4093446E-2,2.600513E-1,-1.0933562E-1,1.6683921E-2],"split_indices":[2,10,8,0,0,8,0,6,0,0,9,6,6,0,0,6,1,0,5,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.5814227E2,2.9270222E0,3.5521524E2,1.8827666E0,1.0442556E0,3.5114804E2,4.0672154E0,3.491249E2,2.023135E0,1.7929883E0,2.2742271E0,3.0621954E2,4.2905357E1,1.0402266E0,1.2340004E0,3.020744E2,4.1451397E0,3.6242356E0,3.9281124E1,2.9674255E2,5.3318543E0,1.0248902E0,3.1202495E0,6.750972E0,3.253015E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"25","size_leaf_vector":"1"}},{"base_weights":[-3.5283577E-3,-2.6560325E-2,3.4561973E-2,1.4413309E-2
,-1.4457108E-1,3.4311038E-1,5.6719235E-3,-7.5813285E-3,4.6863097E-1,5.8110353E-2,-2.8489792E-1,-2.437213E-1,6.3621134E-1,-4.617112E-2,2.119514E-1,3.8874263E-3,-1.7676425E-1,-1.07317366E-1,7.5100493E-1,2.52169E-1,-6.057075E-1,-3.530392E-1,2.6313353E-1,-3.1550497E-2,7.564611E-1,4.346526E-2,-2.32861E-1,-2.010869E-1,2.9999995E-1,-1.249853E-2,4.7701225E-1,3.0927438E-1,3.617597E-2,4.9649927E-1,-2.0994392E-1,-2.7072722E-1,1.2156852E-1,-6.248741E-1,-1.6400675E-1,-6.806442E-2,1.8624657E-1,8.7981385E-1,-2.5129387E-2,-7.136008E-2,3.074162E-1,-6.4846885E-1,-1.1104221E-1,3.4534387E-2,6.00008E-1,2.5876032E-3,-1.544671E-1,2.4882464E-1,-4.123535E-2,-1.0718864E-1,1.847019E-1,4.189454E-2,-1.7839737E-1,-2.4492781E-1,3.5149932E-2,-1.5227892E-1,-1.5315025E-2,8.8670604E-2,3.1367657E-1,1.12224504E-1,-5.8359362E-2,2.6899174E-1,-1.2633388E-3,7.0898235E-2,-2.792203E-1,-6.545643E-2,8.891431E-2,-1.7318423E-1,7.575003E-2,2.0596458E-1,-2.542088E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":56,"left_children":[1,3,5,7,9,11,13,15,17,19,21,-1,23,25,27,29,-1,-1,31,33,35,37,39,-1,41,43,45,-1,47,49,51,-1,-1,53,55,-1,-1,57,59,-1,-1,61,-1,63,65,67,69,71,73,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.1515065E-1,1.0841291E0,1.205046E0,1.6720998E0,1.6594372E0,4.4374304E0,1.3447214E0,1.0714948E0,2.072338E0,3.303912E0,1.373831E0,0E0,9.527497E-1,1.6932182E0,2.1625276E0,1.2322686E0,0E0,0E0,9.4923496E-1,2.3211288E0,2.1136904E0,1.4871345E0,9.0614074E-1,0E0,9.598241E-1,2.1069865E0,1.5877817E0,0E0,1.8627875E0,1.6372693E0,1.2660934E0,0E0,0E0,1.5570593E0,1.0908786E0,0E0,0E0,1.8486147E0,7.348709E-1,0E0,0E0,4.2143488E-1,0E0,2.7475414E0,3.867723E0,2.0242412E0,1.2240678E0,1.960492E0,7.124729E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,12,12,13,13,14,14,15,15,18,18,19,19,20,20,21,21,22,22,24,24,25,25,26,26,28,28,29,29,30,30,33,33,34,34,37,37,38,38,41,41,43,43,44,44,45,45,46,46,47,47,48,48],"right_children":[2,4,6,8,10,12,14,16,18,20,22,-1,24,26,28,30,-1,-1,32,34,36,38,40,-1,42,44,46,-1,48,50,52,-1,-1,54,56,-1,-1,58,60,-1,-1,62,-1,64,66,68,70,72,74,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.8775555E-1,5.599659E-1,-1.1418654E-1,4.9114594E-1,-3.638682E-1,-8.2178444E-1,6.2878585E-1,1.1722991E0,-1.7727183E-1,1.9914937E-1,1.4398967E0,-2.437213E-1,-6.7486215E-1,-3.1497508E-1,-1.4834465E0,8.8920105E-1,-1.7676425E-1,-1.07317366E-1,-6.225345E-1,-1.4983252E-1,1.3858054E0,-3.9281076E-1,-4.8561495E-2,-3.1550497E-2,1.7677041E0,2.5811973E-1,-3.9751104E-1,-2.010869E-1,3.7022063E-1,1.379008E0,5.691137E-1,3.0927438E-1,3.617597E-2,-8.0026084E-1,1.2684474E0,-2.7072722E-1,1.2156852E-1,4.4932756E-1,-1.3255169E0,-6.806442E-2,1.8624657E-1,2.5842196E-1,-2.5129387E-2,3.4824587E-2,7.529394E-2,-4.0351355E-1,3.7022063E-1,4.1130644E-1,1.201199E0,2.5876032E-3,-1.544671E-1,2.4882464E-1,-4.123535E-2,-1.0718864E-1,1.847019E-1,4.189454E-2,-1.7839737E-1,-2.4492781E-1,3.5149932E-2,-1.5227892E-1,-1.5315025E-2,8.8670604E-2,3.1367657E-1,1.12224504E-1,-5.8359362E-2,2.6899174E-1,-1.2633388E-3,7.0898235E-2,-2.792203E-1,-6.545643E-2,8.891431E-2,-1.7318423E-1,7.575003E-2,2.0596458E-1,-2.542088E-2],"split_indices":[3,2,3,2,8,0,2,6,8,6,8,0,8
,4,8,6,0,0,7,1,2,1,6,0,10,8,0,0,10,5,8,0,0,4,7,0,0,5,5,0,0,10,0,10,6,2,10,3,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.5718347E2,2.2277904E2,1.3440443E2,1.6601581E2,5.676324E1,1.0575946E1,1.2382849E2,1.5928598E2,6.7298155E0,2.3461891E1,3.330135E1,1.7813672E0,8.794579E0,9.957823E1,2.4250257E1,1.5718E2,2.105991E0,1.6460048E0,5.083811E0,1.8634466E1,4.8274245E0,2.993508E1,3.3662708E0,1.2448828E0,7.5496955E0,6.779476E1,3.178347E1,1.6005106E0,2.2649748E1,1.5285953E2,4.320473E0,3.0806828E0,2.0031283E0,1.2140879E1,6.493587E0,3.8150265E0,1.0123979E0,1.1332675E1,1.8602404E1,1.5747055E0,1.7915653E0,6.542135E0,1.0075608E0,4.7751358E1,2.0043404E1,6.2244296E0,2.5559042E1,1.2608279E1,1.0041469E1,1.4763481E2,5.2247124E0,2.5234303E0,1.7970426E0,1.242334E0,1.0898545E1,3.7358239E0,2.7577631E0,8.927792E0,2.4048831E0,3.738123E0,1.4864282E1,2.061097E0,4.481038E0,9.90248E0,3.7848877E1,6.284131E0,1.3759273E1,1.5448024E0,4.679627E0,2.0604778E1,4.954263E0,2.8789015E0,9.729378E0,8.912445E0,1.1290238E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"75","size_leaf_vector":"1"}},{"base_weights":[-4.165362E-3,9.18413E-2,-1.4699043E-2,1.8125445E-1,-2.3567592E-1,-7.14463E-2,2.0328213E-2,3.98375E-1,1.9800523E-3,-4.8999292E-1,2.5496912E-1,1.1307662E-1,-1.5319136E-1,6.977261E-3,4.881645E-1,-6.880698E-2,4.733019E-1,-1.7950769E-1,1.4421576E-1,-1.9491775E-1,3.413922E-2,-3.868512E-2,1.5893397E-1,2.96324E-1,-2.5276086E-1,-2.3080032E-1,1.6116479E-1,1.7389301E-2,-4.432683E-1,1.9846565E-1,-8.046338E-3,5.853635E-1,-3.4685113E-2,-2.4201144E-1,4.232949E-1,4.8908332E-1,-8.1675656E-2,-6.2507564E-1,1.8172644E-2,-4.9577278E-1,-8.842425E-2,-2.1227585E-1,4.0319255E-1,-3.2856858E-1,3.3679567E-2,9.949476E-2,-2.55446E-1,2.2384557E-1,-3.717729E-2,2.5641123E-2,-2.108416E-1,1.7713062E-1,-1.189017E-3,-9.237939E-3,2.361326E-1,-9.575633E-2,2.1163394E-1,-1.95968E-2,-2.6703566E-1,6.249863E-2,-1.4404455E-1,-6.823583E-2,-2.4842903E-1,5.761589E-2,-6.521587E-2,-1.4791235E-1,5.0343316E-2,2.1153097E-1,2.5495876E-2,-1.577602E-1,6.81548E-2,-1.131296E-1,1.3866509E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":57,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,-1,31,-1,33,-1,-1,-1,-1,35,37,39,41,43,45,-1,-1,47,-1,49,51,53,55,57,59,61,63,65,67,69,71,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.5848168E-1,1.0692223E0,6.3570166E-1,1.0930178E0,1.1171864E0,1.8576453E0,1.2338173E0,7.060747E-1,1.4864236E0,6.1206484E-1,4.2412496E-1,2.624492E0,2.1103115E0,9.152963E-1,5.0719094E-1,0E0,8.382218E-1,0E0,1.618158E0,0E0,0E0,0E0,0E0,1.9197791E0,1.3461101E0,2.5426712E0,1.6594976E0,1.082904E0,1.6785684E0,0E0,0E0,1.2299702E0,0E0,1.046196E0,6.112797E-1,2.693716E0,2.0142999E0,7.7289176E-1,9.17846E-1,1.9290814E0,1.6989053E0,8.834137E-1,9.882809E-1,1.0674498E0,9.5788234E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,16,16,18,18,23,23,24,24,25,25,26,26,27,27,28,28,31,31,33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,43,43,44,44],"right_
children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,-1,32,-1,34,-1,-1,-1,-1,36,38,40,42,44,46,-1,-1,48,-1,50,52,54,56,58,60,62,64,66,68,70,72,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.4109713E0,9.9497205E-1,-1.4228727E-1,3.2031852E-1,1.6019539E0,-6.9653356E-1,1.9404742E0,-5.750444E-1,-3.0570334E-1,1.0346863E0,-1.6232948E0,3.53506E-1,9.423064E-1,1.60444E0,9.3145706E-2,-6.880698E-2,6.3984674E-1,-1.7950769E-1,-1.2128284E0,-1.9491775E-1,3.413922E-2,-3.868512E-2,1.5893397E-1,-4.2400864E-1,5.599659E-1,-3.3206618E-1,-3.3469358E-1,-6.0997343E-1,-4.862524E-1,1.9846565E-1,-8.046338E-3,1.5234454E0,-3.4685113E-2,6.976058E-1,-8.747628E-1,-5.9706414E-1,1.4742326E0,-5.4334486E-1,-2.8642884E-1,-2.1014415E-1,-1.2995528E0,7.3075134E-1,-4.5244622E-1,1.1561755E0,-1.2460577E0,9.949476E-2,-2.55446E-1,2.2384557E-1,-3.717729E-2,2.5641123E-2,-2.108416E-1,1.7713062E-1,-1.189017E-3,-9.237939E-3,2.361326E-1,-9.575633E-2,2.1163394E-1,-1.95968E-2,-2.6703566E-1,6.249863E-2,-1.4404455E-1,-6.823583E-2,-2.4842903E-1,5.761589E-2,-6.521587E-2,-1.4791235E-1,5.0343316E-2,2.1153097E-1,2.5495876E-2,-1.577602E-1,6.81548E-2,-1.131296E-1,1.3866509E-2],"split_indices":[6,7,5,8,7,1,9,1,7,1,6,2,8,9,1,0,1,0,5,0,0,0,0,5,2,1,2,2,4,0,0,2,0,2,5,6,10,4,4,9,7,9,7,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.5213992E2,3.4053036E1,3.180869E2,2.7100971E1,6.952062E0,1.2100628E2,1.9708063E2,1.1704008E1,1.5396964E1,4.579625E0,2.372437E0,3.7031616E1,8.397466E1,1.9259921E2,4.481416E0,1.0282593E0,1.0675749E1,2.3349502E0,1.3062014E1,3.561382E0,1.0182431E0,1.2111658E0,1.1612712E0,2.4799566E1,1.2232051E1,6.7560875E1,1.641378E1,1.8918562E2,3.413586E0,3.1397188E0,1.3416973E0,8.974152E0,1.7015972E0,5.5348988E0,7.527115E0,1.622952E1,8.570046E0,4.6015363E0,7.6305146E0,2.2746258E1,4.481462E1,6.5029206E0,9.91086E0,7.645604E0,1.8154002E2,1.2425092E0,2.1710768E0,7.2733774E0,1.700774E0,3.7116084E0,1.8232905E0,5.136022E0,2.391093E0,6.2404075E0,9.989113E0,7.040178E0,1.5298682E0,1.8792181E0,2.7223184E0,5.944189E0,1.6863252E0,1.3516997E1,9.229261E0,1.3961269E1,3.0853352E1,3.5692105E0,2.9337103E0,4.461741E0,5.4491186E0,5.682143E0,1.963461E0,4.5174804E0,1.7702254E2],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"73","size_leaf_vector":"1"}},{"base_weights":[-5.5471677E-3,1.7063567E-1,-1.0788911E-2,-1.7038321E-1,4.6011102E-1,-2.1114185E-1,-2.0838003E-3,9.845914E-2,-5.073185E-1,1.1550489E-1,2.2969742E-1,-6.3192457E-1,8.768551E-4,2.5649833E-2,-4.8418332E-2,-2.2131753E-1,-2.2606852E-2,-7.509584E-2,1.23409905E-1,-4.883173E-2,-2.2674346E-1,2.0752329E-1,-5.587729E-1,6.1494583E-4,2.4102378E-1,-1.7461797E-2,-2.8036496E-1,5.2049136E-1,-2.9460824E-1,-1.98196E-1,-5.229079E-2,1.0308106E-2,-1.9575623E-1,4.7484052E-1,-3.7702844E-2,2.4657878E-1,-5.183534E-2,-4.979484E-1,1.596967E-2,-6.3138343E-3,2.7857682E-1,3.5002667E-2,-1.9004133E-1,-6.0286564E-3,1.169507E-1,1.37275E-2,2.1929994E-1,9.35984E-2,-1.783812E-1,1.727919E-2,1.797603E-1,-1.9531545E-1,-5.560346E-3,-2.045028E-1,7.681076E-2,-1.3653159E-1,1.2889758E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":58,"left_children":[1,3,5,7,9,11,13,-1,15,17,-1,19,21,23,25,-1,-1,-1,-1,-1,-1,27,29,31,33
,35,37,39,41,-1,-1,43,-1,45,47,49,51,53,55,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.234478E-1,1.0775087E0,5.910257E-1,1.0697078E0,5.1641595E-1,1.2594573E0,4.2017776E-1,0E0,3.0282366E-1,5.226295E-1,0E0,1.518488E-1,1.3166109E0,1.1030098E0,8.7408996E-1,0E0,0E0,0E0,0E0,0E0,0E0,1.4594779E0,5.116403E-3,1.1734806E0,1.4062436E0,1.0026917E0,9.330771E-1,1.2309638E0,6.076665E-1,0E0,0E0,2.1146631E0,0E0,1.2479644E0,2.2585292E0,8.142736E-1,1.9301319E0,1.3249245E0,1.568189E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,8,8,9,9,11,11,12,12,13,13,14,14,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,31,31,33,33,34,34,35,35,36,36,37,37,38,38],"right_children":[2,4,6,8,10,12,14,-1,16,18,-1,20,22,24,26,-1,-1,-1,-1,-1,-1,28,30,32,34,36,38,40,42,-1,-1,44,-1,46,48,50,52,54,56,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-9.42992E-1,-3.9281076E-1,-8.2880706E-1,9.406206E-3,-1.9011056E-1,-6.35789E-1,-2.8569296E-1,9.845914E-2,-4.8826578E-1,-9.494192E-1,2.2969742E-1,-8.1500554E-1,2.1586609E-1,-4.5244622E-1,1.1818031E0,-2.2131753E-1,-2.2606852E-2,-7.509584E-2,1.23409905E-1,-4.883173E-2,-2.2674346E-1,2.5811973E-1,-2.432298E-1,1.041898E0,2.3453663E-1,-1.0831622E0,1.6043851E0,-3.445578E-1,1.7677041E0,-1.98196E-1,-5.229079E-2,3.0620784E-1,-1.9575623E-1,-3.3165962E-1,1.2588996E-1,-9.66626E-4,-8.3161515E-1,6.0531694E-1,3.53506E-1,-6.3138343E-3,2.7857682E-1,3.5002667E-2,-1.9004133E-1,-6.0286564E-3,1.169507E-1,1.37275E-2,2.1929994E-1,9.35984E-2,-1.783812E-1,1.727919E-2,1.797603E-1,-1.9531545E-1,-5.560346E-3,-2.045028E-1,7.681076E-2,-1.3653159E-1,1.2889758E-1],"split_indices":[4,1,4,2,6,1,7,0,8,9,0,9,2,7,5,0,0,0,0,0,0,8,0,3,6,10,5,9,10,0,0,3,0,6,9,4,10,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.4724585E2,9.121033E0,3.3812482E2,4.3765764E0,4.7444563E0,1.3111082E1,3.2501373E2,1.7753842E0,2.6011925E0,2.8773317E0,1.8671244E0,3.7292721E0,9.381809E0,2.0359683E2,1.2141691E2,1.2347416E0,1.3664508E0,1.3553768E0,1.5219549E0,1.2624252E0,2.466847E0,7.3113427E0,2.070467E0,1.8329378E2,2.030306E1,1.0806888E2,1.3348026E1,4.481533E0,2.8298094E0,1.0688792E0,1.0015876E0,1.8158313E2,1.7106504E0,1.0658395E1,9.644665E0,1.1737062E1,9.633182E1,7.3044105E0,6.043616E0,2.3334813E0,2.1480517E0,1.5744982E0,1.2553113E0,1.6899234E2,1.259078E1,4.4249573E0,6.2334375E0,6.194617E0,3.4500477E0,8.399072E0,3.33799E0,4.0950384E0,9.223678E1,5.950355E0,1.354055E0,2.7790017E0,3.264614E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"57","size_leaf_vector":"1"}},{"base_weights":[-5.485427E-3,-3.4792084E-2,3.4737296E-2,-1.386566E-1,3.452252E-2,1.3234925E-2,4.7901824E-1,5.183354E-2,-2.666065E-1,6.0261923E-1,-1.5413458E-2,-2.1917336E-1,4.6134114E-2,7.298925E-1,-7.206824E-2,2.77024E-1,-1.4034605E-1,-3.7697725E-2,-6.626429E-1,8.29534E-2,9.777465E-1,-9.432198E-2,1.765304E-1,3.8747332E-1,-4.0507048E-1,-1.3352709E-3,3.0994523E-1,2.978315E-1,6.1999064E-2,-3.723059E-1,4.328327E-1,1.24681585E-1,-3.6581314E-1,-4.1148797E-1,9.925627E-2,-7.6664513E-1,3.036822E-2,-2.0721465E-1,1.8811367E-1,3.433174E-1,1.11294895E-1,-2.7123928E-1,-5.1178683E-2,6.502901E-1,3.584728E-2,1.9285E-1,-5.165467E-2,-5.243064E-1,8.6575344E-2,2.9091876E-2,-4.5171353E-1,5.5067785E-2,6.5252894E-1,-1.3425225E-2,-1.4525293E-1,-1.555758E-1,1.788873E-1,1.3410726E-1,-7.291677E-2,-1.6436753E-1,6.8332492E-3,3.2159034E-2,-2.33
17601E-1,-1.4872183E-1,7.031468E-2,-2.4930736E-1,1.1149037E-3,-1.5438579E-1,9.991042E-2,3.0058555E-3,-1.1464771E-1,-5.1410682E-3,2.4174961E-1,-1.1687566E-1,3.6123775E-2,-2.1281284E-1,-1.8121371E-2,1.6091213E-1,2.1879086E-3,1.14429E-2,-2.2224572E-1,-2.03215E-1,9.944271E-2,2.604524E-1,-4.0389325E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":59,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,-1,29,31,33,35,37,39,41,43,45,47,49,51,-1,-1,53,55,57,59,61,63,65,-1,67,-1,-1,-1,-1,69,71,73,-1,-1,75,-1,77,79,81,83,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[4.0807673E-1,1.445894E0,1.3882585E0,1.9714249E0,3.4377446E0,1.0826461E0,1.3862333E0,1.4717448E0,4.345448E0,1.8273187E0,1.7139835E0,2.1382015E0,1.5495635E0,5.1323104E-1,0E0,1.7375458E0,1.152731E0,1.6590708E0,1.475657E0,9.504113E-1,1.4096785E-1,2.7511368E0,2.1759057E0,7.7264374E-1,1.3039701E0,1.4634876E0,1.6116419E0,0E0,0E0,1.08523846E-1,2.3354015E0,1.206601E0,7.387835E-1,1.7282923E0,2.0409732E0,7.770586E-1,0E0,8.5909754E-1,0E0,0E0,0E0,0E0,1.5650803E0,7.920487E-1,1.0002959E0,0E0,0E0,1.008626E0,0E0,1.1027637E0,9.8535395E-1,2.6361012E0,1.4676201E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,29,29,30,30,31,31,32,32,33,33,34,34,35,35,37,37,42,42,43,43,44,44,47,47,49,49,50,50,51,51,52,52],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,-1,30,32,34,36,38,40,42,44,46,48,50,52,-1,-1,54,56,58,60,62,64,66,-1,68,-1,-1,-1,-1,70,72,74,-1,-1,76,-1,78,80,82,84,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[5.868313E-2,-4.8561495E-2,1.6325573E0,-4.803529E-1,4.5194067E-3,-9.236573E-1,-5.750444E-1,-2.3947062E-1,-1.0194073E0,-5.461783E-1,1.0410126E0,-1.607844E0,2.0110925E-1,7.6534563E-1,-7.206824E-2,-7.7400047E-1,-6.805919E-1,-8.1802267E-1,1.2452005E-1,2.5811973E-1,-5.2874237E-2,3.9906673E-2,1.10681206E-1,-2.9598737E-2,8.361202E-1,6.414902E-1,6.2878585E-1,2.978315E-1,6.1999064E-2,-9.858561E-1,-8.0026084E-1,9.406206E-3,-3.874429E-1,-1.4228727E-1,-3.113201E-1,6.1836034E-1,3.036822E-2,-1.1418654E-1,1.8811367E-1,3.433174E-1,1.11294895E-1,-2.7123928E-1,1.2663195E0,-1.016626E0,-9.3749535E-1,1.9285E-1,-5.165467E-2,7.595982E-1,8.6575344E-2,-7.992597E-1,-3.9751104E-1,-5.504646E-1,8.7150747E-1,-1.3425225E-2,-1.4525293E-1,-1.555758E-1,1.788873E-1,1.3410726E-1,-7.291677E-2,-1.6436753E-1,6.8332492E-3,3.2159034E-2,-2.3317601E-1,-1.4872183E-1,7.031468E-2,-2.4930736E-1,1.1149037E-3,-1.5438579E-1,9.991042E-2,3.0058555E-3,-1.1464771E-1,-5.1410682E-3,2.4174961E-1,-1.1687566E-1,3.6123775E-2,-2.1281284E-1,-1.8121371E-2,1.6091213E-1,2.1879086E-3,1.14429E-2,-2.2224572E-1,-2.03215E-1,9.944271E-2,2.604524E-1,-4.0389325E-2],"split_indices":[9,6,5,5,6,8,1,8,7,9,10,8,3,6,0,6,9,1,7,8,8,6,6,5,6,6,2,0,0,8,4,2,3,5,5,5,0,3,0,0,0,0,5,9,0,0,0,0,0,8,0,8,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0],"sum_hessian":[3.441564E2,1.9917247E2,1.4498393E2,7.931798E1,1.19854485E2,1.3927316E2,5.710772E0,3.2109165E1,4.7208813E1,8.789773E0,1.1106471E2,1.6568321E1,1.2270484E2,4.2229843E0,1.4877877E0,1.45815E1,1.7527664E1,3.0610912E1,1.65979E1,4.196908E0,4.5928655E0,7.906821E1,3.199651E1,3.6318839E0,1.2936439E1,1.04835915E2,1.7868923E1,2.213493E0,2.0094912E0,2.552886E0,1.2028614E1,8.262468E0,9.265196E0,7.6706743E0,2.2940239E1,1.460568E1,1.9922208E0,3.140965E0,1.0559428E0,2.9081085E0,1.6847568E0,2.9899395E0,7.607826E1,6.496556E0,2.5499952E1,2.3918884E0,1.2399955E0,1.1247232E1,1.6892054E0,9.907772E1,5.7581954E0,1.0911593E1,6.957329E0,1.0063453E0,1.5465407E0,1.4450458E0,1.0583569E1,4.2881002E0,3.9743679E0,6.0298805E0,3.2353156E0,3.4645655E0,4.2061086E0,3.7517157E0,1.9188522E1,1.3406545E1,1.1991357E0,2.0321198E0,1.1088454E0,6.50249E1,1.1053363E1,1.3960335E0,5.100522E0,3.6301916E0,2.186976E1,7.661565E0,3.585668E0,3.1095548E0,9.596817E1,2.4592834E0,3.298912E0,2.592112E0,8.319482E0,5.3804216E0,1.5769074E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"85","size_leaf_vector":"1"}},{"base_weights":[-5.314539E-3,-1.2249557E-1,-2.824472E-3,-3.689226E-2,2.3810998E-2,5.3733903E-1,-4.9768493E-2,7.083988E-2,-5.5872206E-2,4.6257887E-2,2.0218648E-1,-6.706475E-1,-3.6046203E-2,-9.402338E-2,1.343214E-1,-3.9810708E-1,-3.2353415E-3,-2.5519313E-2,-2.8240848E-1,5.9569395E-1,-5.328574E-2,5.8951914E-1,-2.0366378E-1,-2.0730269E-1,1.591446E-1,-5.96417E-1,7.35548E-2,3.5008556E-1,-5.0007496E-2,4.9818493E-2,2.4582201E-1,2.1271696E-3,-5.628244E-2,3.2628973E-3,2.4188648E-1,-1.2976514E-1,6.80632E-2,1.6552451E-1,3.2527026E-2,5.7534877E-2,-2.3904806E-1,2.4151179E-1,-1.3572921E-1,1.9788949E-1,-2.8877337E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":60,"left_children":[1,-1,3,5,7,9,11,13,15,-1,-1,17,19,21,23,25,27,-1,-1,29,31,33,35,-1,37,39,-1,41,43,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.389426E-1,0E0,3.074321E-1,1.1256822E0,7.1673787E-1,7.114005E-2,1.2221377E0,1.26907E0,1.2804837E0,0E0,0E0,4.1235828E-1,1.6019545E0,2.6654334E0,1.8941851E0,1.3551748E0,1.0532786E0,0E0,0E0,2.3992848E-1,1.146969E0,6.12308E-1,3.0556889E0,0E0,1.6399515E0,1.3430626E0,0E0,3.0850496E0,1.9088483E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,2,2,3,3,4,4,5,5,6,6,7,7,8,8,11,11,12,12,13,13,14,14,15,15,16,16,19,19,20,20,21,21,22,22,24,24,25,25,27,27,28,28],"right_children":[2,-1,4,6,8,10,12,14,16,-1,-1,18,20,22,24,26,28,-1,-1,30,32,34,36,-1,38,40,-1,42,44,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.9810368E0,-1.2249557E-1,-1.4228727E-1,-1.9132047E0,3.295149E-2,-7.146222E-1,-1.7998126E0,-1.3762592E0,2.2212896E-1,4.6257887E-2,2.0218648E-1,-5.6167686E-1,-1.7131008E0,-4.5551506E-1,-9.1293585E-1,4.9114594E-1,-2.9598737E-2,-2.5519313E-2,-2.8240848E-1,-2.9627237E-1,-2.0780419E-1,-9.3749535E-1,1.4606847E-1,-2.0730269E-1,-1.2662021E0,-1.0002563E0,7.35548E-2,-8.908796E-2,-9.52706E-1,4.9818493E-2,2.4582201E-1,2.1271696E-3,-5.628244E-2,3.2628973E-3,2.4188648E-1,-1.2976514E-1,6.80632E-2,1.6552451E-1,3.2527026E-2,5.7534877E-2,-2.3904806E-1,2.4151179E-1,-1.3572921E-1,1.9788949E-1,-2.8877337E-2],"split_indices":[8,0,5,7,3,4,7,7,3,0,0,6,7,6,3,2,5,0,0,6,6,0,6,0,7,1,0,1,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0],"sum_hessian":[3.379073E2,1.0742038E0,3.3683307E2,1.4762717E2,1.892059E2,2.3444073E0,1.4528276E2,1.1905238E2,7.015353E1,1.1757153E0,1.1686919E0,2.1063416E0,1.4317642E2,3.296368E1,8.60887E1,8.476654E0,6.167687E1,1.0813941E0,1.0249476E0,2.9120188E0,1.402644E2,3.951513E0,2.9012167E1,1.7300526E0,8.435865E1,6.534957E0,1.9416975E0,6.4521155E0,5.5224754E1,1.5936908E0,1.318328E0,9.742172E1,4.2842686E1,1.3630816E0,2.5884314E0,1.8939028E1,1.0073138E1,8.521384E0,7.5837265E1,1.3334855E0,5.2014713E0,4.115728E0,2.3363876E0,2.567544E0,5.2657207E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"45","size_leaf_vector":"1"}},{"base_weights":[-2.7336043E-3,-5.660728E-3,3.0757168E-1,-7.012179E-4,-3.8988608E-1,1.1376912E-1,2.3548665E-2,-1.0620569E-2,1.4830351E-1,-2.1188965E-1,8.7714046E-2,-1.4291023E-3,-4.7565916E-1,-3.170343E-2,3.9478332E-1,-8.182979E-3,2.8283098E-1,-2.118877E-1,-7.5352594E-2,2.7200124E-1,-2.0430765E-1,5.6801236E-1,-8.1869245E-2,4.2668497E-3,-2.9571172E-2,1.4666845E-1,-5.5188626E-2,5.743485E-2,-8.245638E-2,2.1595004E-1,-3.8499057E-2,1.69918E-2,-1.9164954E-1,2.1127167E-1,-3.979077E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":61,"left_children":[1,3,5,7,9,-1,-1,11,13,-1,-1,15,17,19,21,23,25,-1,27,29,31,33,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.0677173E-1,6.3555586E-1,2.2044092E-2,4.8994374E-1,1.124042E0,0E0,0E0,1.3274941E0,9.3049693E-1,0E0,0E0,5.8923215E-1,5.02522E-1,7.309143E-1,1.1736416E0,6.081257E-1,7.750972E-1,0E0,2.293249E-1,1.0062644E0,1.0323641E0,7.918179E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,7,7,8,8,11,11,12,12,13,13,14,14,15,15,16,16,18,18,19,19,20,20,21,21],"right_children":[2,4,6,8,10,-1,-1,12,14,-1,-1,16,18,20,22,24,26,-1,28,30,32,34,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[3.0829902E0,2.5453355E0,-3.9281076E-1,7.4123514E-1,-3.113201E-1,1.1376912E-1,2.3548665E-2,5.699578E-1,1.1339923E0,-2.1188965E-1,8.7714046E-2,1.6019539E0,-6.396795E-1,1.3452524E-1,3.6909416E0,5.599659E-1,8.7190956E-1,-2.118877E-1,6.0531694E-1,1.0552436E0,-9.3110704E-1,1.8545378E0,-8.1869245E-2,4.2668497E-3,-2.9571172E-2,1.4666845E-1,-5.5188626E-2,5.743485E-2,-8.245638E-2,2.1595004E-1,-3.8499057E-2,1.69918E-2,-1.9164954E-1,2.1127167E-1,-3.979077E-2],"split_indices":[9,9,1,4,5,0,0,4,9,0,0,7,3,7,0,2,5,0,0,4,5,4,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.348252E2,3.3266885E2,2.1563227E0,3.294186E2,3.2502496E0,1.1432257E0,1.0130969E0,3.097285E2,1.9690125E1,2.1964045E0,1.0538452E0,3.04709E2,5.0194864E0,1.1883123E1,7.8070016E0,2.9858597E2,6.1230392E0,2.6996994E0,2.319787E0,4.0975094E0,7.7856135E0,6.317269E0,1.4897326E0,2.399487E2,5.863726E1,4.2149982E0,1.9080412E0,1.0097588E0,1.3100282E0,1.5573171E0,2.5401921E0,5.407733E0,2.377881E0,5.2846527E0,1.032616E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"35","size_leaf_vector":"1"}},{"base_weights":[-2.4238E-3,-6.834356E-2,1.1274229E-2,-2.9743102E-1,5.8876406E-3,1.16079845E-1,-1.3046085E-2,-5.657887E-1,1.6188976E-1,5.4234415E-2,-4.8924956E-1,-2.0396903E-1,2.208635E-1,6.529815E-2,-7.2104394E-2,-7.932648E-1,6.2808655E-2,3.8892445E-1,-1.3985029E-1,-3.524431E-2,2.903556E-1,-1.9167463E-1,-5.0077762E-2,7.445688E-1,-4.4978333E-1,-8.4943786E-2,4.035707E-1,1.7114392E-1,-2.6082762E-2,-5.681209E-1,-4.2079225E-2,-2.7494636E-1,-7.
568768E-2,1.7517537E-1,-1.3923918E-1,-2.3943229E-2,6.0039055E-1,-3.9561382E-1,1.4031294E-1,6.186641E-1,-7.7399075E-2,2.7403286E-1,6.422466E-2,9.52999E-2,-5.9993273E-1,5.078193E-1,-3.0732155E-1,7.545815E-1,1.539081E-1,9.505432E-2,5.9160405E-1,-9.745508E-2,6.599585E-1,-2.7544048E-1,1.3644212E-1,-2.5608814E-1,4.7293827E-3,2.1347745E-1,5.1555667E-2,-3.0649036E-2,-2.0792483E-1,-7.180453E-2,7.309833E-2,1.8222213E-2,2.2163814E-1,-1.3877027E-1,1.0741543E-1,-4.9057577E-2,-2.6266435E-1,2.4525139E-1,2.9680153E-2,6.323883E-2,-1.2610422E-1,2.6736364E-1,-4.7556803E-2,1.4213307E-1,-4.6928447E-2,-4.149598E-2,9.7723E-2,-2.8730087E-2,2.2986548E-1,1.3712066E-2,-1.1978192E-1,2.6115692E-1,-3.135643E-2,-1.41567E-1,1.2048946E-1,-2.1059638E-2,4.7975373E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":62,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,-1,37,39,-1,-1,41,43,45,47,49,51,53,55,-1,-1,-1,-1,-1,57,59,61,63,65,-1,-1,-1,67,69,71,73,75,77,79,81,83,-1,-1,85,87,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.0282643E-1,9.828588E-1,7.0965064E-1,1.834377E0,1.0931034E0,1.8032418E0,1.052199E0,1.357774E0,1.0356685E0,8.8686234E-1,8.936417E-2,3.4711604E0,2.2678838E0,9.507298E-1,1.9056255E0,2.2139359E-1,1.192877E0,5.381763E-1,0E0,2.01554E0,1.4376454E0,0E0,0E0,8.6191535E-2,1.4933832E0,2.235809E0,2.092144E0,1.398184E0,2.6776273E0,3.0366175E0,1.2358114E0,0E0,0E0,0E0,0E0,0E0,4.572487E-2,8.3794796E-1,9.0469015E-1,3.7017822E-1,1.2252849E0,0E0,0E0,0E0,1.0358255E0,5.1188886E-1,7.969707E-1,1.4159732E0,1.6605418E0,2.1816354E0,8.8652945E-1,2.1447184E0,9.2127943E-1,0E0,0E0,3.3315508E0,1.1985384E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,19,19,20,20,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,36,36,37,37,38,38,39,39,40,40,44,44,45,45,46,46,47,47,48,48,49,49,50,50,51,51,52,52,55,55,56,56],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,-1,38,40,-1,-1,42,44,46,48,50,52,54,56,-1,-1,-1,-1,-1,58,60,62,64,66,-1,-1,-1,68,70,72,74,76,78,80,82,84,-1,-1,86,88,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-3.3469358E-1,-9.3749535E-1,-1.2823372E-1,3.664962E-1,5.2817637E-1,-3.8474053E-1,-1.7241693E-1,1.4398967E0,-1.1689172E0,-1.1807964E-2,-1.180954E0,-9.3749535E-1,-6.805919E-1,3.53506E-1,-1.0164239E-1,3.6481115E-1,-6.004373E-1,-1.6317608E0,-1.3985029E-1,-4.2400864E-1,-1.9863154E-1,-1.9167463E-1,-5.0077762E-2,-8.1500554E-1,-7.146222E-1,2.5842196E-1,1.10681206E-1,2.8468606E-1,-2.4319147E-1,-2.7132162E-1,-6.289329E-1,-2.7494636E-1,-7.568768E-2,1.7517537E-1,-1.3923918E-1,-2.3943229E-2,-9.52706E-1,-3.9751104E-1,-1.9011056E-1,-7.573051E-2,4.2916662E-1,2.7403286E-1,6.422466E-2,9.52999E-2,-1.0994488E0,-4.5770618E-1,-8.0026084E-1,7.9795814E-1,-2.7735096E-1,-7.573051E-2,-9.155495E-1,-6.855323E-1,2.8029475E-1,-2.7544048E-1,1.3644212E-1,6.2878585E-1,1.9592093E-1,2.1347745E-1,5.1555667E-2,-3.0649036E-2,-2.0792483E-1,-7.180453E-2,7.309833E-2,1.8222213E-2,2.2163814E-1,-1.3877027E-1,1.0741543E-1,-4.9057577E-2,-2.6266435E-1,2.4525139E-1,2.9680153E-2,6.32
3883E-2,-1.2610422E-1,2.6736364E-1,-4.7556803E-2,1.4213307E-1,-4.6928447E-2,-4.149598E-2,9.7723E-2,-2.8730087E-2,2.2986548E-1,1.3712066E-2,-1.1978192E-1,2.6115692E-1,-3.135643E-2,-1.41567E-1,1.2048946E-1,-2.1059638E-2,4.7975373E-2],"split_indices":[2,0,2,1,0,6,6,8,0,0,6,0,9,2,6,5,4,0,0,5,5,0,0,9,4,10,6,2,6,1,0,0,0,0,0,0,7,0,6,9,6,0,0,0,7,4,4,9,9,9,6,6,5,0,0,2,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.3351352E2,5.669377E1,2.7681973E2,1.3138811E1,4.355496E1,5.142719E1,2.2539255E2,8.147089E0,4.991722E0,4.0491688E1,3.0632715E0,1.2450891E1,3.8976295E1,9.683273E1,1.2855981E2,5.78989E0,2.357199E0,3.9460323E0,1.0456898E0,2.9981033E1,1.05106535E1,1.4211612E0,1.6421103E0,2.1449723E0,1.0305918E1,1.4777481E1,2.4198816E1,4.446102E1,5.237171E1,6.314965E0,1.2224486E2,4.1503677E0,1.6395221E0,1.1307893E0,1.2264098E0,1.4203259E0,2.5257063E0,9.410485E0,2.0570549E1,5.192679E0,5.3179746E0,1.0794474E0,1.0655249E0,1.503827E0,8.802091E0,3.6812553E0,1.1096226E1,9.217356E0,1.4981461E1,3.8686157E1,5.7748637E0,4.8213737E1,4.157972E0,4.781373E0,1.5335921E0,2.113674E1,1.01108116E2,1.4809169E0,1.0447896E0,5.4135303E0,3.9969554E0,4.119853E0,1.6450695E1,1.1867292E0,4.0059495E0,2.7888677E0,2.529107E0,4.0239305E0,4.7781606E0,1.5260745E0,2.155181E0,1.8322135E0,9.264011E0,8.038506E0,1.17885E0,7.1180797E0,7.863381E0,1.9430197E1,1.925596E1,1.2612982E0,4.5135655E0,3.3277645E1,1.4936095E1,3.1512845E0,1.0066872E0,1.6127684E1,5.0090556E0,6.8555695E1,3.2552425E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"89","size_leaf_vector":"1"}},{"base_weights":[-1.6038192E-3,2.218195E-2,-4.4490084E-2,4.1021034E-3,1.8018991E-1,6.684004E-2,-1.0209662E-1,2.3170225E-2,-2.8120863E-1,8.927981E-2,6.3857585E-1,1.9658156E-1,-1.21560045E-1,-3.7914255E-1,-2.6200013E-2,-1.144508E-2,1.7073259E-1,-6.037927E-1,4.5650646E-2,3.2646212E-1,-2.8628445E-1,2.3459381E-1,4.5815226E-2,-9.69169E-2,3.6836347E-1,-5.663996E-1,1.20356716E-1,-4.8513705E-1,9.734257E-2,-6.5181476E-1,1.6789561E-2,1.5379096E-2,-7.641814E-1,2.8331113E-1,-3.9761913E-1,-2.6205072E-1,-1.269416E-1,-4.9653935E-1,4.1140655E-1,-5.898064E-2,4.3487483E-1,-5.9383136E-1,3.4793723E-1,3.013471E-1,-4.0355828E-1,6.180223E-1,-5.6447577E-2,-2.2374359E-1,-1.3683312E-1,2.695948E-1,-2.9137173E-1,-2.013948E-3,-5.9602994E-1,-2.2817649E-1,-4.8690718E-2,6.9998306E-1,-2.9674958E-2,-4.9336366E-3,7.458886E-2,-4.699707E-2,-2.6368967E-1,1.1752278E-1,-1.768575E-1,-1.9283102E-1,1.0534715E-1,4.107806E-2,-9.222369E-2,-4.081799E-2,-1.8075565E-1,1.4336247E-2,2.0334734E-1,-1.0191988E-2,1.8991143E-1,-2.0794864E-1,-1.8922526E-2,-4.670004E-2,1.9323884E-1,-7.706155E-2,1.7060372E-1,-2.1116959E-1,1.627984E-1,2.2249742E-1,-2.5606798E-2,5.229425E-2,-1.837284E-1,-1.0260609E-1,4.065061E-2,-2.231819E-2,1.4463304E-1,-1.5449922E-1,2.9429441E-2,-1.2573604E-1,1.4117552E-1,-2.4122252E-1,-5.7715833E-2,2.9983459E-2,2.9872465E-1,1.3469097E-1,-2.3359574E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":63,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,39,41,-1,-1,43,45,47,49,51,-1,53,55,57,59,61,63,-1,65,67,69,-1,71,73,75,77
,79,81,83,-1,85,87,89,91,93,-1,-1,95,97,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.3667165E-1,6.0587335E-1,7.6188695E-1,1.0486879E0,8.505627E-1,1.0199797E0,1.6297903E0,9.2386633E-1,1.30094E0,1.8159541E0,1.2039256E-1,1.2716534E0,1.9230602E0,1.4025261E0,1.6861557E0,2.9825344E0,2.3109481E0,6.7987776E-1,1.5782399E0,7.8408515E-1,1.7705102E0,0E0,0E0,1.3236047E0,1.69701E0,3.794E-1,8.1525457E-1,7.976084E-1,0E0,9.113753E-2,1.9018508E0,1.0659448E0,2.1671963E-1,3.0002484E0,1.3447707E0,0E0,2.0358784E-1,4.195839E-2,4.2139232E-1,0E0,9.984977E-1,2.604189E-1,5.84882E-1,8.7044513E-1,2.083793E0,9.289775E-1,9.6858263E-1,0E0,2.1762274E-1,7.17131E-1,3.701822E-1,9.4371307E-1,8.6337185E-1,0E0,0E0,5.773827E-1,1.3368782E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,23,23,24,24,25,25,26,26,27,27,29,29,30,30,31,31,32,32,33,33,34,34,36,36,37,37,38,38,40,40,41,41,42,42,43,43,44,44,45,45,46,46,48,48,49,49,50,50,51,51,52,52,55,55,56,56],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,42,-1,-1,44,46,48,50,52,-1,54,56,58,60,62,64,-1,66,68,70,-1,72,74,76,78,80,82,84,-1,86,88,90,92,94,-1,-1,96,98,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-2.8569296E-1,-4.5244622E-1,-1.1632603E0,7.6534563E-1,9.599756E-1,4.5761305E-1,-1.0164239E-1,3.0531117E-1,-3.445578E-1,2.3453663E-1,5.691137E-1,1.1339923E0,8.524527E-1,1.2150863E0,-4.803529E-1,2.699239E-1,5.599659E-1,8.7190956E-1,5.9381795E-1,-7.8681755E-1,8.7190956E-1,2.3459381E-1,4.5815226E-2,-1.7294567E0,1.9167074E0,2.2252698E0,1.4028195E0,-8.8221234E-1,9.734257E-2,1.7788109E-1,4.5194067E-3,1.590698E-1,8.3089806E-2,-5.491631E-1,1.1818031E0,-2.6205072E-1,1.1254588E0,-3.1497508E-1,-9.3936574E-1,-5.898064E-2,-4.26067E-1,-2.1057709E-1,-2.8642884E-1,-1.4945498E0,3.6909416E0,-4.803529E-1,6.4971733E-1,-2.2374359E-1,1.60444E0,-1.504633E0,9.981511E-1,-1.0838329E0,-1.1411514E-1,-2.2817649E-1,-4.8690718E-2,9.406206E-3,-6.575297E-1,-4.9336366E-3,7.458886E-2,-4.699707E-2,-2.6368967E-1,1.1752278E-1,-1.768575E-1,-1.9283102E-1,1.0534715E-1,4.107806E-2,-9.222369E-2,-4.081799E-2,-1.8075565E-1,1.4336247E-2,2.0334734E-1,-1.0191988E-2,1.8991143E-1,-2.0794864E-1,-1.8922526E-2,-4.670004E-2,1.9323884E-1,-7.706155E-2,1.7060372E-1,-2.1116959E-1,1.627984E-1,2.2249742E-1,-2.5606798E-2,5.229425E-2,-1.837284E-1,-1.0260609E-1,4.065061E-2,-2.231819E-2,1.4463304E-1,-1.5449922E-1,2.9429441E-2,-1.2573604E-1,1.4117552E-1,-2.4122252E-1,-5.7715833E-2,2.9983459E-2,2.9872465E-1,1.3469097E-1,-2.3359574E-2],"split_indices":[7,7,6,6,6,1,6,6,9,6,8,9,1,7,5,6,2,5,10,3,5,0,0,6,0,0,9,9,0,7,6,3,5,7,5,0,5,4,7,0,8,1,4,5,0,5,10,0,9,2,4,9,5,0,0,2,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.28076E2,2.1134286E2,1.1673313E2,1.9056381E2,2.0779045E1,3.9750744E1,7.698239E1,1.7948698E2,1.1076833E1,1.8337084E1,2.4419608E0,2.3514252E1,1.6236492E1,1.56950655E1,6.1287323E1,1.461301E2,3.3356888E1,5.1484585E0
,5.928375E0,1.1319289E1,7.0177946E0,1.4148742E0,1.0270867E0,8.842384E0,1.4671867E1,5.24698E0,1.09895115E1,1.3910125E1,1.7849406E0,3.0300293E0,5.8257294E1,1.4204771E2,4.0823803E0,2.826058E1,5.096306E0,2.7571726E0,2.391286E0,2.2441366E0,3.684238E0,1.8036532E0,9.5156355E0,4.7689295E0,2.2488654E0,3.8540425E0,4.9883423E0,8.954529E0,5.7173386E0,3.181853E0,2.0651271E0,8.3192625E0,2.6702483E0,2.7868629E0,1.1123261E1,2.0260565E0,1.0039729E0,2.8141656E0,5.544313E1,1.2581156E2,1.623616E1,1.0245389E0,3.0578413E0,2.5628023E1,2.632557E0,3.9459624E0,1.1503435E0,1.0692303E0,1.3220557E0,1.028755E0,1.2153816E0,2.056629E0,1.627609E0,3.07317E0,6.442466E0,3.7593753E0,1.0095539E0,1.0085342E0,1.2403312E0,1.2606963E0,2.5933464E0,3.980866E0,1.0074762E0,7.5695705E0,1.3849578E0,4.5254908E0,1.1918477E0,1.0318295E0,1.0332977E0,3.4251182E0,4.8941445E0,1.4915354E0,1.1787128E0,1.5404512E0,1.2464117E0,6.6854324E0,4.437829E0,1.3708853E0,1.4432805E0,4.310724E0,5.1132404E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"99","size_leaf_vector":"1"}},{"base_weights":[-4.064896E-3,-1.5999181E-3,-1.08723186E-1,-1.5216282E-2,7.202547E-2,4.1498346E-4,-2.8071636E-1,2.740816E-2,4.015067E-1,2.508773E-1,-1.1528556E-2,3.2759047E-1,-4.4904593E-1,-2.3019487E-1,1.0838957E-1,-5.828173E-2,5.694862E-1,4.975781E-1,-1.1349343E-1,-3.1514555E-1,-4.6533826E-4,2.2517703E-1,-3.8092032E-2,1.4321738E-1,-5.769816E-1,2.884243E-1,-4.750692E-1,1.7829621E-1,-5.473028E-1,2.5073075E-1,-2.9309658E-2,-3.9537102E-2,2.0448647E-1,-1.6064836E-1,1.9062749E-1,-1.6611296E-1,9.850543E-2,-2.9646019E-3,8.775118E-2,-2.1961813E-1,4.5315664E-2,1.5610187E-1,-6.4664856E-2,-1.8875104E-1,2.312742E-2,-7.315023E-2,6.568373E-2,-1.6857207E-2,-2.137385E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":64,"left_children":[1,3,-1,5,7,9,11,13,15,17,19,21,23,25,27,-1,29,31,33,35,37,-1,-1,-1,39,41,43,45,47,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.8492224E-1,3.2391733E-1,0E0,1.1313516E0,7.303891E-1,7.759398E-1,1.6991485E0,9.739476E-1,7.052718E-1,1.1177363E0,8.31593E-1,8.381628E-1,1.7325697E0,1.5594556E0,1.7152125E0,0E0,9.5027757E-1,9.2677355E-1,2.103998E0,1.5180068E0,6.647601E-1,0E0,0E0,0E0,1.38129E0,6.047554E-1,7.2801995E-1,6.135551E-1,2.5567865E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,16,16,17,17,18,18,19,19,20,20,24,24,25,25,26,26,27,27,28,28],"right_children":[2,4,-1,6,8,10,12,14,16,18,20,22,24,26,28,-1,30,32,34,36,38,-1,-1,-1,40,42,44,46,48,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.0923681E0,7.8739315E-1,-1.08723186E-1,6.465325E-1,1.3065345E0,-1.2854533E0,-9.395118E-1,-5.1479864E-1,7.529394E-2,-6.7486215E-1,-1.1510396E0,6.747046E-1,-8.708964E-1,-1.9705367E-1,-2.1057709E-1,-5.828173E-2,-2.834341E-2,-1.4212476E0,5.069149E-1,1.8145575E-1,5.901882E-1,2.2517703E-1,-3.8092032E-2,1.4321738E-1,6.313125E-1,-5.6583846E-1,-2.7132162E-1,-1.2508602E0,-6.359675E-1,2.5073075E-1,-2.9309658E-2,-3.9537102E-2,2.0448647E-1,-1.6064836E-1,1.9062749E-1,-1.6611296E-1,9.850543E-2,-2.9646019E-3,8.775118E-2,-2.1961813E-1,4.5315664E-2,1.5610187E-1,-6.4664856E-2,-1.8875104E-1,2.312742E-2,-7.315023E-2,6.568373E-2,-1.6857207E-2,-2.137385E-1],"split_indices":[6,5,0,5,10,9,1,4,6,8,9,5,3,2,1,0,1,8,8,6,5,0,0,0,8,7,1,10,10,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.222168E2,3.210131E2,1.2037109E0,2.7158008E2,4.9433006E1,2.5742282E2,1.4157281E1,4.4491318E1,4.9416885E0,1.0806185E1,2.4661662E2,2.8634098E0,1.1293871E1,1.0200576E1,3.429074E1,1.0524391E0,3.889249E0,6.225539E0,4.5806456E0,7.703984E0,2.3891264E2,1.1458732E0,1.7175366E0,1.0389432E0,1.0254928E1,3.2145777E0,6.985998E0,3.164499E1,2.645751E0,2.5991945E0,1.2900546E0,1.4728365E0,4.7527027E0,3.1120443E0,1.468601E0,5.722139E0,1.9818449E0,2.3240878E2,6.503853E0,8.449554E0,1.8053734E0,2.179265E0,1.0353127E0,5.352668E0,1.6333301E0,2.3405728E0,2.9304417E1,1.0031204E0,1.6426306E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"49","size_leaf_vector":"1"}},{"base_weights":[-2.7889176E-3,3.0509645E-2,-3.5813157E-2,4.067978E-2,-1.4796817E-1,-2.0373422E-1,-2.688039E-2,2.3145355E-2,3.2955503E-1,-4.513884E-2,1.9260907E-1,1.268071E-2,2.1096516E-1,-2.95376E-1,8.149168E-1,3.5814863E-1,-5.617009E-2,4.843228E-1,2.6393522E-2,2.216599E-2,-2.0242177E-1,-1.8897675E-1,6.4329825E-2,3.3406562E-1,-8.657105E-3,2.4089791E-1,-1.0798067E-1,-2.0552342E-1,-2.873666E-2,-4.160417E-2,2.1599422E-1,1.371019E-1,-8.4289566E-2,1.5038288E-3,9.4333E-2,-1.1414971E-1,1.15398526E-1,-2.2986155E-2,2.067692E-2,-1.0889958E-1,5.6929745E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":65,"left_children":[1,3,5,7,-1,-1,9,11,13,15,17,19,-1,21,23,25,27,29,31,33,-1,-1,-1,-1,-1,-1,-1,35,37,-1,-1,-1,39,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.537541E-1,8.735093E-1,9.096455E-1,7.931836E-1,0E0,0E0,6.511155E-1,1.059493E0,2.9619906E0,6.8210465E-1,5.933455E-1,9.856454E-1,0E0,9.2916775E-1,1.3716104E0,1.5978956E0,5.9108746E-1,7.5755024E-1,4.684741E-1,7.3948044E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,2.464491E0,5.8634865E-1,0E0,0E0,0E0,6.815163E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,6,6,7,7,8,8,9,9,10,10,11,11,13,13,14,14,15,15,16,16,17,17,18,18,19,19,27,27,28,28,32,32],"right_children":[2,4,6,8,-1,-1,10,12,14,16,18,20,-1,22,24,26,28,30,32,34,-1,-1,-1,-1,-1,-1,-1,36,38,-1,-1,-1,40,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-6.7923063E-1,1.4176449E-1,-6.658904E-1,-7.325917E-1,-1.4796817E-1,-2.0373422E-1,1.3065345E0,2.7579596E-2,-4.2915997E-1,-6.525501E-1,-1.4983252E-1,-7.526021E-1,2.1096516E-1,-4.5355532E-1,-1.4293733E-1,-3.2037044E-1,-4.5244622E-1,-5.750444E-1,-1.435813E0,1.1793455E0,-2.0242177E-1,-1.8897675E-1,6.4329825E-2,3.3406562E-1,-8.657105E-3,2.4089791E-1,-1.0798067E-1,2.641684E-1,5.7071567E-1,-4.160417E-2,2.1599422E-1,1.371019E-1,-5.5560064E-1,1.5038288E-3,9.4333E-2,-1.1414971E-1,1.15398526E-1,-2.2986155E-2,2.067692E-2,-1.0889958E-1,5.6929745E-2],"split_indices":[7,4,7,7,0,0,10,4,4,7,1,7,0,1,9,0,7,1,2,2,0,0,0,0,0,0,0,3,6,0,0,0,3,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.1970053E2,1.59227E2,1.6047353E2,1.5709872E2,2.1282783E0,1.1702726E0,1.5930327E2,1.49127E2,7.9717298E0,1.478025E2,1.1500753E1,1.4787029E2,1.2567165E0,3.6559353E0,4.3157945E0,3.097444E0,1.4470506E2,3.479788E0,8.020966E0,1.4681227E2,1.0580162E0,2.0651426E0,1.5907927E0,2.9514613E0,1.3643329E0,1.8395944E0,1.2578496E0,2.1447683E1,1.2325738E2,1.0679612E0,2.4118268E0,1.0003066E0,7.0206585E0,1.3963452E2,7.177747E0,1.6817106E1,4.630576E0,8.285025E1,4.0407135E1,3.31987E0,3.
7007885E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"41","size_leaf_vector":"1"}},{"base_weights":[-1.1207622E-3,1.8037318E-1,-6.376212E-3,-1.936576E-1,4.0274605E-1,-1.9417307E-1,1.4196633E-3,-1.3809341E-1,5.5615965E-2,-2.3667349E-2,5.8467406E-1,-2.796762E-1,1.13649555E-1,1.3846913E-1,-7.584893E-3,2.2407153E-1,4.320146E-2,-4.3092407E-2,-5.054528E-1,-2.7798308E-2,5.888952E-1,-9.2505835E-2,1.4445559E-2,-3.136573E-1,1.4764425E-1,-2.1513437E-1,-1.0096038E-1,-2.3033991E-1,4.4709143E-1,2.7248946E-1,-4.6287384E-2,-2.0713599E-1,1.518523E-1,2.4282111E-1,-1.1296326E-3,1.1232902E-1,-1.9961561E-1,-6.976864E-2,2.3454545E-2,1.1356336E-1,-1.5969938E-1,-5.793394E-3,1.8378718E-1,-9.132246E-2,8.225534E-2,1.1619407E-1,-3.7163973E-2,1.0569684E-1,-1.5960544E-1,1.7820566E-3,-1.4488526E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":66,"left_children":[1,3,5,7,9,11,13,-1,-1,-1,15,17,-1,19,21,-1,-1,23,25,27,29,31,33,35,-1,-1,37,39,41,-1,-1,43,45,47,49,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.0508834E-1,8.266876E-1,4.550771E-1,4.9281898E-1,5.692311E-1,7.129319E-1,3.703568E-1,0E0,0E0,0E0,2.1072102E-1,5.847809E-1,0E0,1.4020014E0,5.2846074E-1,0E0,0E0,1.1157058E0,4.1709197E-1,1.4946365E0,1.3349507E0,1.6609269E0,7.991185E-1,1.4641067E0,0E0,0E0,9.192311E-2,2.1878757E0,3.7205952E-1,0E0,0E0,1.9571886E0,1.2914405E0,1.4044206E0,7.224374E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,10,10,11,11,13,13,14,14,17,17,18,18,19,19,20,20,21,21,22,22,23,23,26,26,27,27,28,28,31,31,32,32,33,33,34,34],"right_children":[2,4,6,8,10,12,14,-1,-1,-1,16,18,-1,20,22,-1,-1,24,26,28,30,32,34,36,-1,-1,38,40,42,-1,-1,44,46,48,50,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-9.42992E-1,-4.8826578E-1,-8.2880706E-1,-3.9281076E-1,-1.6330593E0,-3.857449E-1,-1.6897553E0,-1.3809341E-1,5.5615965E-2,-2.3667349E-2,-1.0000844E0,-5.135612E-1,1.13649555E-1,-1.7397814E0,-6.289329E-1,2.2407153E-1,4.320146E-2,-7.0273864E-1,1.683855E0,3.929833E-1,-2.7735096E-1,-1.1418654E-1,-7.431684E-1,-7.060735E-1,1.4764425E-1,-2.1513437E-1,-2.432298E-1,-8.498767E-1,-9.494192E-1,2.7248946E-1,-4.6287384E-2,9.995786E-1,-5.461783E-1,5.2817637E-1,2.438496E0,1.1232902E-1,-1.9961561E-1,-6.976864E-2,2.3454545E-2,1.1356336E-1,-1.5969938E-1,-5.793394E-3,1.8378718E-1,-9.132246E-2,8.225534E-2,1.1619407E-1,-3.7163973E-2,1.0569684E-1,-1.5960544E-1,1.7820566E-3,-1.4488526E-1],"split_indices":[4,8,4,1,7,7,7,0,0,0,4,3,0,7,0,0,0,3,10,5,9,3,4,0,0,0,0,3,9,0,0,9,9,0,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.1764954E2,8.001491E0,3.0964804E2,3.0315537E0,4.969937E0,1.1388973E1,2.9825906E2,1.6537541E0,1.3777997E0,1.5178727E0,3.452064E0,1.0355631E1,1.033342E0,1.7501923E1,2.8075714E2,2.01523E0,1.4368343E0,5.638304E0,4.717327E0,1.3468521E1,4.0334005E0,5.7172997E1,2.2358415E2,4.020115E0,1.6181892E0,2.5894513E0,2.1278756E0,9.802635E0,3.6658862E0,2.6656175E0,1.3677832E0,3.9020515E1,1.8152477E1,1.3343349E1,2.102408E2,1.3380116E0,2.6821034E0,1.0547991E0,1.0730766E0,3.1659172E0,6.636718E0,1.1916481E0,2.4742382E0,3.2766205E1,6.254313E0,9.572973E0,8.579505E0,1.2168913E1,1.1744369E0,2.0817392E2,2.06689E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"51","size_leaf_vec
tor":"1"}},{"base_weights":[8.803285E-5,1.5870884E-2,-6.1956108E-2,2.0598352E-2,-1.5322718E-1,-3.656398E-1,-5.155892E-3,8.398964E-3,2.4409619E-1,-6.3168645E-1,1.0974364E-1,5.4199494E-2,-2.4690187E-1,-2.547833E-2,7.678483E-2,-1.0989309E-1,3.6294657E-1,-2.617456E-1,3.5204787E-2,1.6635811E-1,-1.0659543E-1,-4.162515E-2,2.9809693E-1,-6.140033E-1,1.7675951E-1,3.650718E-1,-3.9032463E-2,3.838035E-2,2.2596876E-1,5.8086884E-1,-4.0392E-3,2.3283021E-1,-2.3190655E-1,6.791692E-1,-1.969782E-1,-2.3018266E-1,-2.7956361E-2,4.3613416E-1,-1.369158E-1,-6.6394754E-2,1.887164E-1,-3.073437E-2,7.7948286E-3,-7.563871E-2,3.0793887E-2,2.3931092E-1,-6.455179E-2,-1.1030441E-1,1.558475E-1,1.1712816E-1,-1.9458306E-1,7.3203385E-2,-1.1737583E-1,3.6330547E-2,2.7983135E-1,-1.8004408E-1,1.3725723E-1,1.7873994E-1,-1.0933034E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":67,"left_children":[1,3,5,7,-1,9,11,13,15,17,19,21,23,25,27,-1,29,-1,-1,-1,-1,31,33,35,37,39,41,43,-1,45,47,49,51,53,55,-1,-1,57,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.1051368E-1,6.398675E-1,1.1073487E0,6.836757E-1,0E0,1.3490117E0,8.0526835E-1,5.568567E-1,1.0747366E0,1.2992101E0,1.1242834E0,1.0671308E0,1.8285086E0,8.6610675E-1,2.0388734E0,0E0,9.115217E-1,0E0,0E0,0E0,0E0,1.7918388E0,2.5584145E0,4.1931748E-1,1.1696776E0,9.896917E-1,6.476134E-1,1.4517614E0,0E0,1.4040172E0,1.1864278E0,2.2152145E0,1.623789E0,9.4663906E-1,1.954144E0,0E0,0E0,3.7666494E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,16,16,21,21,22,22,23,23,24,24,25,25,26,26,27,27,29,29,30,30,31,31,32,32,33,33,34,34,37,37],"right_children":[2,4,6,8,-1,10,12,14,16,18,20,22,24,26,28,-1,30,-1,-1,-1,-1,32,34,36,38,40,42,44,-1,46,48,50,52,54,56,-1,-1,58,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[4.3232614E-1,1.3492355E0,6.004839E-1,2.641684E-1,-1.5322718E-1,-3.720675E-1,-8.6605296E-2,1.9577834E-1,-8.603547E-1,1.683855E0,-6.35789E-1,-2.5788262E-1,1.1150343E0,-1.607844E0,1.2481655E0,-1.0989309E-1,1.9577834E-1,-2.617456E-1,3.5204787E-2,1.6635811E-1,-1.0659543E-1,-6.35789E-1,1.044998E0,1.8406473E0,1.7687072E0,-4.2400864E-1,-2.7132162E-1,-6.7486215E-1,2.2596876E-1,1.4119537E-1,9.282538E-1,-3.1497508E-1,7.8966135E-1,4.6455386E-1,1.3492355E0,-2.3018266E-1,-2.7956361E-2,-1.0831622E0,-1.369158E-1,-6.6394754E-2,1.887164E-1,-3.073437E-2,7.7948286E-3,-7.563871E-2,3.0793887E-2,2.3931092E-1,-6.455179E-2,-1.1030441E-1,1.558475E-1,1.1712816E-1,-1.9458306E-1,7.3203385E-2,-1.1737583E-1,3.6330547E-2,2.7983135E-1,-1.8004408E-1,1.3725723E-1,1.7873994E-1,-1.0933034E-2],"split_indices":[3,6,3,3,0,4,4,5,0,10,1,4,7,8,2,0,5,0,0,0,0,1,7,3,7,5,1,8,0,7,5,4,3,6,6,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.1509457E2,2.5178854E2,6.3306023E1,2.5050072E2,1.2878234E0,9.118177E0,5.4187847E1,2.38519E2,1.1981721E1,5.6354976E0,3.4826794E0,4.4128796E1,1.00590515E1,1.5992303E2,7.8595955E1,1.6133853E0,1.0368336E1,4.1398225E0,1.4956748E0,1.6818199E0,1.8008596E0,3.227689E1,1.1851906E1,5.14856E0,4.9104915E0,4.4941387E0,1.554289E2,7.537344E1,3.2225158E0,6.1396804E0,4.2286553E0,1.3123866E1,1.9153025E1,6.4869103E0,5
.3649955E0,3.6122742E0,1.5362861E0,3.704193E0,1.2062985E0,1.4453021E0,3.0488365E0,7.839103E1,7.7037865E1,1.312403E1,6.2249413E1,4.823671E0,1.3160095E0,2.6710474E0,1.5576079E0,1.1605856E1,1.5180094E0,4.670966E0,1.4482059E1,2.4884477E0,3.9984627E0,3.371329E0,1.9936663E0,2.5738456E0,1.1303474E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"59","size_leaf_vector":"1"}},{"base_weights":[-6.636599E-4,-1.4022331E-2,7.6657996E-2,4.059533E-2,-4.987171E-2,1.3316062E-1,-3.198998E-1,-1.419093E-1,9.542137E-2,-2.4311723E-1,-2.0262111E-2,-7.607357E-2,2.6091123E-1,-5.277215E-1,4.3906588E-2,5.238355E-1,-3.0181685E-1,3.1875774E-2,3.5044482E-1,-5.0025143E-2,-5.438078E-1,-1.2768291E-1,2.7815402E-2,-6.5090257E-1,1.2579556E-1,-1.7526355E-3,4.8155406E-1,-2.153825E-1,-1.9759214E-2,2.2943212E-2,6.4719766E-1,7.989489E-2,-6.206813E-1,3.4739587E-1,-3.5238765E-2,-1.8853292E-1,5.505031E-1,-3.361739E-1,2.3463668E-1,-6.484486E-1,2.7026562E-2,1.7785159E-1,-2.3300946E-1,8.983265E-2,-1.1412681E-1,-2.555731E-2,-2.5590593E-1,2.360034E-1,-1.5723915E-1,-1.2550502E-1,1.3448724E-1,6.813764E-1,1.3898125E-1,5.8438644E-2,2.3298521E-1,-3.2696147E-2,1.9410948E-1,1.1316399E-1,-2.3278259E-1,1.9919804E-1,-1.8860372E-2,6.275913E-3,-1.2398594E-1,-1.8846051E-1,2.6175737E-2,2.1958956E-1,-3.332785E-2,1.3594993E-2,-2.0204349E-1,1.1436071E-1,-7.7559896E-2,-2.451381E-1,-2.6851004E-2,1.3642906E-1,-6.3218474E-2,-1.5334615E-1,-1.5857158E-2,7.498906E-2,-8.400256E-3,-9.221879E-2,2.6238581E-2,3.0666795E-2,-2.0890795E-1,-9.24567E-2,4.7483847E-2,4.177677E-2,2.5128186E-1,-1.261348E-1,1.4839393E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":68,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,-1,29,31,33,35,37,39,41,43,45,47,49,51,-1,-1,-1,53,55,57,59,61,63,65,67,69,71,-1,73,75,77,79,-1,-1,-1,81,83,-1,85,87,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.2348076E-1,5.24804E-1,1.0763631E0,1.0777153E0,9.19976E-1,1.1181476E0,6.632515E-1,2.8164463E0,1.3238906E0,1.214464E0,7.3307306E-1,1.979258E0,1.4924822E0,3.3930254E-1,0E0,2.3757458E-1,2.5774918E0,1.4320846E0,1.8649218E0,1.2293688E0,6.0641146E-1,1.4526694E0,8.764831E-1,4.3176532E-1,2.5440319E0,7.6616234E-1,8.6307E-1,0E0,0E0,0E0,7.960355E-2,1.1780466E0,2.0636182E0,1.5812705E0,1.2017403E0,6.812422E-1,1.5419858E0,9.897825E-1,6.529113E-1,6.069107E-1,0E0,1.3665171E0,1.6287568E0,1.3097317E0,1.2097996E0,0E0,0E0,0E0,1.4550107E0,6.075761E-1,0E0,6.10101E-1,1.4835298E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,41,41,42,42,43,43,44,44,48,48,49,49,51,51,52,52],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,-1,30,32,34,36,38,40,42,44,46,48,50,52,-1,-1,-1,54,56,58,60,62,64,66,68,70,72,-1,74,76,78,80,-1,-1,-1,82,84,-1,86,88,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.066704E0,-9.660462E-1,5.414032E-1,-5.3669715E-1,-3.3469358E-1,-6.9653356E-1,1.1049571E0,-1.016626E0,-3.03364E-1,2
.5842196E-1,-4.862524E-1,-6.289835E-1,-3.3469358E-1,2.5842196E-1,4.3906588E-2,-1.566358E0,-5.6167686E-1,5.9381795E-1,-1.016626E0,-8.894858E-2,9.7883886E-1,3.4824587E-2,9.0406567E-1,-7.717146E-1,-4.862524E-1,-2.1928513E-1,-2.8569296E-1,-2.153825E-1,-1.9759214E-2,2.2943212E-2,-1.2182465E0,5.691137E-1,-1.7131008E0,-4.2915997E-1,6.2878585E-1,-1.4228727E-1,2.8468606E-1,1.1931772E-2,5.2817637E-1,3.6481115E-1,2.7026562E-2,2.8468606E-1,4.820193E-1,-2.1014415E-1,1.3169855E0,-2.555731E-2,-2.5590593E-1,2.360034E-1,-8.1802267E-1,1.3080368E0,1.3448724E-1,-8.0783725E-1,-2.293364E-1,5.8438644E-2,2.3298521E-1,-3.2696147E-2,1.9410948E-1,1.1316399E-1,-2.3278259E-1,1.9919804E-1,-1.8860372E-2,6.275913E-3,-1.2398594E-1,-1.8846051E-1,2.6175737E-2,2.1958956E-1,-3.332785E-2,1.3594993E-2,-2.0204349E-1,1.1436071E-1,-7.7559896E-2,-2.451381E-1,-2.6851004E-2,1.3642906E-1,-6.3218474E-2,-1.5334615E-1,-1.5857158E-2,7.498906E-2,-8.400256E-3,-9.221879E-2,2.6238581E-2,3.0666795E-2,-2.0890795E-1,-9.24567E-2,4.7483847E-2,4.177677E-2,2.5128186E-1,-1.261348E-1,1.4839393E-1],"split_indices":[8,7,7,5,2,1,3,9,3,10,4,4,2,10,0,7,6,10,9,0,3,10,2,4,4,3,7,0,0,0,9,8,7,4,2,5,2,3,0,5,0,2,10,9,2,0,0,0,1,1,0,3,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.111321E2,2.659954E2,4.513672E1,1.05353546E2,1.6064186E2,4.0088947E1,5.047773E0,2.3970919E1,8.138263E1,2.038559E1,1.4025627E2,1.5351032E1,2.4737915E1,3.4003296E0,1.6474433E0,4.2017717E0,1.9769146E1,6.604967E1,1.5332966E1,1.3124135E1,7.2614546E0,4.2852985E1,9.740328E1,3.4117067E0,1.1939325E1,1.1746442E1,1.2991473E1,2.0156858E0,1.3846439E0,1.2583736E0,2.9433982E0,9.338952E0,1.0430194E1,1.085268E1,5.5196983E1,4.166254E0,1.1166712E1,6.45473E0,6.669405E0,6.2129E0,1.0485545E0,1.0809085E1,3.20439E1,6.8041695E1,2.936159E1,1.2722534E0,2.1394532E0,3.0464542E0,8.892871E0,9.778648E0,1.9677938E0,7.5806766E0,5.4107966E0,1.2119669E0,1.7314314E0,7.611732E0,1.7272199E0,1.2118453E0,9.2183485E0,5.776524E0,5.076156E0,4.8880512E1,6.316471E0,1.1133847E0,3.0528693E0,8.679689E0,2.4870224E0,3.4351888E0,3.0195413E0,5.316441E0,1.3529636E0,4.4177914E0,1.7951086E0,6.2119603E0,4.597124E0,1.1873924E1,2.0169977E1,2.8368603E1,3.9673088E1,1.4722155E1,1.4639435E1,6.550643E0,2.342228E0,5.8966994E0,3.881949E0,2.119012E0,5.461664E0,2.0321894E0,3.3786075E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"89","size_leaf_vector":"1"}},{"base_weights":[-1.558306E-3,-3.0806608E-2,3.7842415E-2,7.6479994E-2,-7.976495E-2,3.6371168E-1,9.497103E-3,1.15269676E-1,-3.85426E-1,-1.5970604E-1,1.5647285E-2,-1.4137681E-1,5.7740515E-1,2.5689853E-2,-1.8960539E-1,1.786724E-1,-1.6071421E-1,-1.9505446E-1,-3.7897352E-2,2.615999E-1,-2.0233431E-1,1.10658884E-1,-2.0836085E-1,-3.902917E-2,7.282418E-1,8.170918E-3,4.3488145E-1,1.0339732E-1,5.961937E-1,-4.0841228E-1,2.7449277E-1,2.956231E-2,-4.6141785E-2,-1.00062616E-1,5.517217E-1,-3.85603E-2,-3.1486702E-1,3.0591795E-3,5.457225E-1,-5.7608753E-1,4.3281205E-2,9.6724135E-1,3.2719158E-4,1.4001206E-1,-4.2727955E-2,1.9769353E-1,2.1705875E-2,7.776706E-2,-1.8740017E-2,-4.7287516E-2,2.601953E-1,9.267146E-2,-1.8282007E-1,1.8879619E-1,-6.830546E-2,2.1085937E-1,1.9771645E-3,-4.64589E-2,1.5210722E-1,-1.6359852E-1,-3.474582E-2,4.1461695E-2,-1.4090072E-1,2.4806403E-1,-6.97944E-2,-2.4948114E-1,-7.689092E-2,6.346984E-2,-1.5695389E-1,7.3683434E-3,3.5931298E-1,1.5929621E-1,1.32
43874E-2,-8.84862E-2,-2.476879E-3],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":69,"left_children":[1,3,5,7,9,11,13,15,17,19,21,-1,23,25,-1,27,29,-1,31,33,35,37,39,-1,41,43,45,47,49,51,53,-1,-1,-1,55,57,59,61,63,65,67,69,-1,71,73,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.5477856E-1,9.340455E-1,1.2082657E0,1.0454128E0,9.314926E-1,2.1403885E0,1.2805691E0,9.44024E-1,3.9477354E-1,1.2447441E0,1.2242169E0,0E0,1.0324152E0,8.5436004E-1,0E0,1.2901257E0,1.2222933E0,0E0,6.312045E-2,1.3121483E0,1.108902E0,1.8840187E0,1.6123806E0,0E0,1.2839429E0,7.8294694E-1,3.499387E-1,9.893012E-1,1.4365683E0,1.1924214E0,9.154542E-1,0E0,0E0,0E0,3.8831484E-1,1.7227741E0,1.5843315E0,2.192374E0,1.9390633E0,3.6207628E-1,1.1702687E0,1.1793656E0,0E0,1.1940539E0,7.305196E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,12,12,13,13,15,15,16,16,18,18,19,19,20,20,21,21,22,22,24,24,25,25,26,26,27,27,28,28,29,29,30,30,34,34,35,35,36,36,37,37,38,38,39,39,40,40,41,41,43,43,44,44],"right_children":[2,4,6,8,10,12,14,16,18,20,22,-1,24,26,-1,28,30,-1,32,34,36,38,40,-1,42,44,46,48,50,52,54,-1,-1,-1,56,58,60,62,64,66,68,70,-1,72,74,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[5.868313E-2,-2.5497583E-1,1.2588996E-1,8.367388E-1,-1.6608919E-1,-4.803529E-1,1.3138483E0,1.5389027E-1,-4.5355532E-1,-1.1102537E0,6.747046E-1,-1.4137681E-1,-7.3706096E-1,1.6325573E0,-1.8960539E-1,-8.908796E-2,6.70219E-1,-1.9505446E-1,1.376723E0,-1.9705367E-1,-1.1861606E0,4.4932756E-1,-4.0061373E-1,-3.902917E-2,-1.6608919E-1,6.277426E-2,-7.572781E-1,-6.9653356E-1,-6.186598E-1,-6.0997343E-1,8.8282496E-1,2.956231E-2,-4.6141785E-2,-1.00062616E-1,-6.133851E-1,-3.2438374E-1,1.9592093E-1,-2.293364E-1,-1.7727183E-1,4.2916662E-1,-4.5355532E-1,-7.8681755E-1,3.2719158E-4,-5.491631E-1,2.603036E-1,1.9769353E-1,2.1705875E-2,7.776706E-2,-1.8740017E-2,-4.7287516E-2,2.601953E-1,9.267146E-2,-1.8282007E-1,1.8879619E-1,-6.830546E-2,2.1085937E-1,1.9771645E-3,-4.64589E-2,1.5210722E-1,-1.6359852E-1,-3.474582E-2,4.1461695E-2,-1.4090072E-1,2.4806403E-1,-6.97944E-2,-2.4948114E-1,-7.689092E-2,6.346984E-2,-1.5695389E-1,7.3683434E-3,3.5931298E-1,1.5929621E-1,1.3243874E-2,-8.84862E-2,-2.476879E-3],"split_indices":[9,5,9,0,0,5,6,1,1,8,5,0,8,5,0,1,1,0,0,2,7,5,4,0,0,10,1,1,3,2,1,0,0,0,9,3,8,4,8,6,1,3,0,7,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.0584784E2,1.7566484E2,1.30183E2,5.486729E1,1.2079755E2,9.470845E0,1.2071216E2,5.1308945E1,3.5583458E0,6.5361E1,5.5436543E1,1.6843336E0,7.7865114E0,1.18676544E2,2.0356102E0,4.201036E1,9.298587E0,1.5253938E0,2.0329518E0,5.533662E0,5.982734E1,3.928154E1,1.6155005E1,1.3924927E0,6.3940187E0,1.1478228E2,3.8942661E0,3.6650307E1,5.360054E0,5.9651117E0,3.3334756E0,1.0026042E0,1.0303477E0,1.7644637E0,3.7691984E0,2.4912985E1,3.4914356E1,3.230012E1,6.981418E0,6.0397515E0,1.0115253E1,4.56381E0,1.8302089E0,3.1482845E1,8.329944E1,1.9012686E0,1.9929974E0,1.8606745E1,1.804356E1,1.5286105E0,3.83144
35E0,1.1880566E0,4.777055E0,1.8049521E0,1.5285234E0,2.7244647E0,1.0447335E0,2.1125744E1,3.7872412E0,1.5374157E1,1.95402E1,2.5669271E1,6.630848E0,5.0830364E0,1.8983814E0,2.467578E0,3.5721736E0,8.28149E0,1.8337636E0,1.113963E0,3.449847E0,5.3056464E0,2.6177198E1,9.107118E0,7.419232E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"75","size_leaf_vector":"1"}},{"base_weights":[-1.4309015E-3,-1.1172666E-2,1.1211923E-1,-3.5470307E-1,-6.6430564E-3,4.738327E-1,1.11835925E-2,-2.0693022E-1,7.090488E-2,1.04966454E-1,-1.00235E-2,2.0704217E-1,-1.1137141E-2,-7.644475E-2,2.4845278E-1,-1.19160354E-1,2.1437951E-3,-2.0625338E-1,2.6392654E-1,4.9579117E-1,-7.904503E-2,1.3525902E-1,-3.882719E-1,9.819185E-3,-2.9629466E-1,8.583772E-2,-4.450351E-1,4.9062335E-1,-5.709513E-2,1.9075E-1,3.0000156E-2,7.1442895E-2,-1.1527775E-1,1.2549987E-1,-4.6439935E-2,-1.5688641E-1,4.794392E-2,1.0536156E-3,1.4068562E-1,-1.8244551E-1,4.4494484E-2,-1.145862E-1,1.2259748E-1,4.8690487E-2,-1.7918348E-1,3.3500273E-2,1.8785062E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":70,"left_children":[1,3,5,7,9,11,13,-1,-1,-1,15,-1,-1,17,19,21,23,25,27,29,31,33,35,37,39,41,43,45,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.3591893E-1,4.3263516E-1,8.7077755E-1,8.797162E-1,3.372702E-1,6.015537E-1,4.3034744E-1,0E0,0E0,0E0,3.6582536E-1,0E0,0E0,7.266066E-1,4.9049228E-1,1.9443544E0,5.7251596E-1,8.4547555E-1,5.7105696E-1,1.1997479E-1,4.0708604E-1,1.2918633E0,1.0938642E0,7.031961E-1,9.591586E-1,1.0688716E0,7.007295E-1,1.03382826E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,10,10,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27],"right_children":[2,4,6,8,10,12,14,-1,-1,-1,16,-1,-1,18,20,22,24,26,28,30,32,34,36,38,40,42,44,46,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.9354022E0,-1.7998126E0,-4.5355532E-1,-8.8221234E-1,-1.7664618E0,3.6481115E-1,1.3943407E-1,-2.0693022E-1,7.090488E-2,1.04966454E-1,-9.3749535E-1,2.0704217E-1,-1.1137141E-2,9.323718E-1,-3.2037044E-1,-3.6243847E-1,1.773418E0,2.1463482E-1,2.7738922E0,5.056718E-1,-1.1507303E-1,-5.461783E-1,3.664962E-1,1.6607294E0,3.9471725E-1,-4.26067E-1,-8.8221234E-1,5.069149E-1,-5.709513E-2,1.9075E-1,3.0000156E-2,7.1442895E-2,-1.1527775E-1,1.2549987E-1,-4.6439935E-2,-1.5688641E-1,4.794392E-2,1.0536156E-3,1.4068562E-1,-1.8244551E-1,4.4494484E-2,-1.145862E-1,1.2259748E-1,4.8690487E-2,-1.7918348E-1,3.3500273E-2,1.8785062E-1],"split_indices":[10,7,1,9,7,5,5,0,0,0,0,0,0,9,0,1,5,1,10,5,8,9,1,5,9,8,9,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[3.0154266E2,2.78547E2,2.2995672E1,2.6189039E0,2.759281E2,4.2109365E0,1.8784737E1,1.5648081E0,1.0540957E0,1.6288766E0,2.7429922E2,2.7121105E0,1.4988258E0,1.4213292E1,4.571444E0,2.6631294E1,2.4766792E2,1.0574494E1,3.6387987E0,2.3116927E0,2.2597516E0,1.3945005E1,1.2686289E1,2.4240083E2,5.267088E0,5.0444093E0,5.5300846E0,2.3739543E0,1.2648444E0,1.2595415E0,1.0521511E0,1.2159009E0,1.0438508E0,6.833666E0,7.111339E0,1.0220598E1,2.4656913E0,2.4011008E2,2.2907562E0,2.8795137E0,2.3875744E0,1.9849243E0,3.059485E0,1.0951567E0,4.434928E0,1.1059091E0,1.2680451E0],"tree_param":{"num_deleted":"0","num_feature":"11","n
um_nodes":"47","size_leaf_vector":"1"}},{"base_weights":[1.1772738E-5,4.5696765E-2,-1.9598335E-2,1.4929177E-1,-5.48112E-2,-6.550596E-2,2.7767843E-2,3.9322723E-2,4.4529697E-1,4.0123895E-1,-9.087775E-2,-2.2341023E-1,-2.0578697E-3,5.0353885E-1,1.1285929E-2,3.5061103E-1,-6.2359605E-2,-2.6297176E-1,6.170135E-1,1.70297E-1,9.426196E-3,-2.6209405E-1,3.9531417E-2,-1.5391733E-2,-3.9583874E-1,4.8420545E-1,-3.2310218E-2,-7.6785707E-3,2.2420427E-1,-5.1830076E-2,1.1392743E-1,-2.0336507E-1,7.3306763E-1,-3.5566333E-1,2.4882424E-1,1.8628778E-2,-1.3650638E-1,-2.6893742E-2,7.445787E-1,-4.5362297E-1,1.1576313E-1,2.926568E-1,-1.5492293E-1,-3.1940737E-1,2.3304036E-1,-5.7005453E-1,1.4964243E-2,-4.133179E-2,2.2474645E-1,1.7180851E-1,-5.763233E-2,6.560955E-2,-2.4746469E-1,4.0818876E-1,-9.71825E-2,-2.1891847E-1,1.1543738E-1,2.0164594E-2,2.6525003E-1,1.01452574E-1,-1.5318269E-1,-5.575831E-2,1.3980906E-1,2.5231352E-1,1.16081145E-2,-1.6212079E-1,8.167733E-2,-1.437935E-1,1.16305314E-1,-9.526993E-2,1.18975125E-1,7.84123E-3,-1.4983639E-1,-2.2037874E-1,-1.8586807E-2,1.4782488E-1,-3.4771923E-2,7.334489E-3,-2.0678371E-1,1.9414413E-1,-7.736416E-2,-2.8322063E-2,1.1796385E-1,3.0127048E-2,-1.4642209E-1,-1.18656844E-1,1.1110562E-1,-1.032404E-1,1.4895983E-1,9.803041E-3,-1.3421795E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":71,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,-1,-1,39,41,43,45,47,49,-1,-1,51,53,55,57,59,61,-1,-1,-1,63,65,67,69,71,73,75,77,79,-1,-1,-1,81,83,85,87,89,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.7008983E-1,9.509252E-1,4.604904E-1,1.4425898E0,8.0438805E-1,1.0756956E0,8.139272E-1,1.0917768E0,1.628206E0,2.0411313E-1,9.9461424E-1,1.0979269E0,1.1576971E0,4.6764803E-1,6.669777E-1,1.9444282E0,2.4914978E0,2.1886392E-1,9.682152E-1,0E0,0E0,1.4412884E0,1.3197504E0,1.2066383E0,1.2101018E0,8.628442E-1,1.1670694E0,0E0,0E0,1.4837942E0,2.4728587E0,1.594563E0,4.667666E-1,1.6740184E0,1.3483036E0,0E0,0E0,0E0,5.515995E-1,9.621196E-1,1.3592455E0,8.6127216E-1,9.6727073E-1,7.2015E-1,8.5711396E-1,8.5428977E-1,1.1973536E0,0E0,0E0,0E0,1.2301879E0,8.336428E-1,2.3574188E0,1.2711158E0,1.088544E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,21,21,22,22,23,23,24,24,25,25,26,26,29,29,30,30,31,31,32,32,33,33,34,34,38,38,39,39,40,40,41,41,42,42,43,43,44,44,45,45,46,46,50,50,51,51,52,52,53,53,54,54],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,-1,-1,40,42,44,46,48,50,-1,-1,52,54,56,58,60,62,-1,-1,-1,64,66,68,70,72,74,76,78,80,-1,-1,-1,82,84,86,88,90,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.0994488E0,1.3903836E0,5.868313E-2,-6.396795E-1,-1.3590488E0,-4.8561495E-2,-9.02426E-1,-5.6167686E-1,-5.439832E-1,1.683855E0,1.7677041E0,-4.803529E-1,4.5194067E-3,-4.862524E-1,-2.9598737E-2,-8.708964E-1,-8.394876E-2,-3.9281076E-1,-1.2182465E0,1.70297E-1,9.426196E-3,3.825173E-1,-2.1057709E-1,-6.147578E-1,1.4247327E-1,-4.657865E-1,-9.874513E-1,-7.6785707E-3,2.2420427E-1,-1.039405E0,2
.3453663E-1,7.8226164E-2,-9.155495E-1,-9.1293585E-1,-6.289329E-1,1.8628778E-2,-1.3650638E-1,-2.6893742E-2,7.8739315E-1,6.3984674E-1,-3.113201E-1,-3.3469358E-1,1.6376211E-1,-1.6608919E-1,-2.7857873E-1,-8.603547E-1,-1.006067E0,-4.133179E-2,2.2474645E-1,1.7180851E-1,8.467204E-2,-2.5497583E-1,4.2916662E-1,-6.289329E-1,-1.7224395E-1,-2.1891847E-1,1.1543738E-1,2.0164594E-2,2.6525003E-1,1.01452574E-1,-1.5318269E-1,-5.575831E-2,1.3980906E-1,2.5231352E-1,1.16081145E-2,-1.6212079E-1,8.167733E-2,-1.437935E-1,1.16305314E-1,-9.526993E-2,1.18975125E-1,7.84123E-3,-1.4983639E-1,-2.2037874E-1,-1.8586807E-2,1.4782488E-1,-3.4771923E-2,7.334489E-3,-2.0678371E-1,1.9414413E-1,-7.736416E-2,-2.8322063E-2,1.1796385E-1,3.0127048E-2,-1.4642209E-1,-1.18656844E-1,1.1110562E-1,-1.032404E-1,1.4895983E-1,9.803041E-3,-1.3421795E-1],"split_indices":[7,10,9,3,8,6,3,6,6,10,10,5,6,4,5,3,6,1,9,0,0,8,1,6,0,7,5,0,0,6,6,2,6,3,0,0,0,0,5,1,5,2,6,0,6,0,7,0,0,0,4,5,6,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.994777E2,8.9542755E1,2.0993495E2,4.3855198E1,4.5687557E1,1.06414406E2,1.0352054E2,3.2801823E1,1.1053377E1,2.6063335E0,4.3081223E1,2.9779797E1,7.663461E1,2.4766936E0,1.01043846E2,7.4737444E0,2.5328077E1,2.0532072E0,9.00017E0,1.4285343E0,1.1777992E0,1.8189783E1,2.4891441E1,1.3990664E1,1.5789133E1,3.6096175E0,7.3024994E1,1.0635365E0,1.4131571E0,6.2875607E1,3.816824E1,3.2436655E0,4.2300787E0,1.2965046E1,1.2363032E1,1.0138341E0,1.0393732E0,1.4217136E0,7.578456E0,1.1938075E1,6.251707E0,1.0594865E1,1.4296577E1,6.163035E0,7.827629E0,1.0814988E1,4.974145E0,1.2216811E0,2.3879364E0,2.065235E0,7.095976E1,3.970518E1,2.3170424E1,1.555414E1,2.26141E1,1.5847282E0,1.6589372E0,1.0496039E0,3.180475E0,2.150896E0,1.081415E1,4.167365E0,8.195667E0,6.4988475E0,1.0796087E0,1.0891508E1,1.0465673E0,1.7215554E0,4.530152E0,1.2426693E0,9.352196E0,9.977354E0,4.319223E0,1.6495268E0,4.513508E0,4.2513723E0,3.576257E0,1.939379E0,8.875609E0,1.0860081E0,3.8881369E0,6.63392E1,4.620557E0,3.8126526E1,1.5786557E0,1.898139E1,4.189033E0,1.3302815E0,1.4223859E1,1.7158401E1,5.4556994E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"91","size_leaf_vector":"1"}},{"base_weights":[1.167668E-3,2.7264878E-2,-4.0096033E-2,1.4900396E-2,3.3637443E-1,4.6512413E-1,-5.375619E-2,2.140149E-2,-3.0256313E-1,5.178689E-1,-5.4228704E-2,3.280147E-2,1.7597762E-1,-2.7077594E-1,-1.0493313E-2,7.075964E-4,1.6266988E-1,3.969547E-2,-1.5662378E-1,2.3865886E-1,-2.0095836E-2,-6.547464E-1,-7.994405E-2,1.90651E-3,-1.4072362E-1,1.4565381E-2,-4.5715728E-1,-3.411491E-2,4.649206E-1,-1.0543064E-3,-2.4484183E-1,4.2955875E-1,-2.6145154E-1,5.3212464E-2,-1.2620704E-1,1.1827319E-2,-5.499259E-2,-2.6048687E-1,1.9360831E-2,7.030905E-2,-1.5499865E-1,-2.9507678E-2,1.842741E-1,6.798664E-3,1.7716491E-1,-1.3905394E-1,2.5304906E-2,-2.143407E-3,9.06709E-2,5.676552E-2,-8.160409E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":72,"left_children":[1,3,5,7,9,11,13,15,17,19,-1,-1,-1,21,23,25,27,-1,-1,-1,-1,29,31,33,-1,35,37,39,41,-1,-1,43,45,47,49,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[3.1915212E-1,6.887656E-1,8.175066E-1,3.6886388E-1,7.4454665E-1,7.098848E-2,1.055342E0,5.0562644E-1
,4.117593E-1,9.2896605E-1,0E0,0E0,0E0,1.3173702E0,5.411725E-1,9.704539E-1,1.3303066E0,0E0,0E0,0E0,0E0,6.3479304E-1,1.3724347E0,6.213378E-1,0E0,7.372257E-1,1.0197592E0,1.9758627E0,7.9916775E-1,0E0,0E0,2.4586177E-1,7.8454614E-1,1.0162852E0,1.2948613E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,13,13,14,14,15,15,16,16,21,21,22,22,23,23,25,25,26,26,27,27,28,28,31,31,32,32,33,33,34,34],"right_children":[2,4,6,8,10,12,14,16,18,20,-1,-1,-1,22,24,26,28,-1,-1,-1,-1,30,32,34,-1,36,38,40,42,-1,-1,44,46,48,50,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-3.5239425E-1,6.4252335E-1,-1.0917766E0,5.5844444E-1,1.0629177E0,1.3264844E-1,-6.805919E-1,-9.087947E-3,-9.52706E-1,-3.0166942E-1,-5.4228704E-2,3.280147E-2,1.7597762E-1,-1.6608919E-1,2.3728786E0,-7.21471E-2,2.1586609E-1,3.969547E-2,-1.5662378E-1,2.3865886E-1,-2.0095836E-2,-3.3469358E-1,-1.9705367E-1,3.929833E-1,-1.4072362E-1,-6.225345E-1,8.3089806E-2,-5.4334486E-1,-1.1361345E0,-1.0543064E-3,-2.4484183E-1,-3.3469358E-1,5.599659E-1,2.6745534E-2,-7.3706096E-1,1.1827319E-2,-5.499259E-2,-2.6048687E-1,1.9360831E-2,7.030905E-2,-1.5499865E-1,-2.9507678E-2,1.842741E-1,6.798664E-3,1.7716491E-1,-1.3905394E-1,2.5304906E-2,-2.143407E-3,9.06709E-2,5.676552E-2,-8.160409E-2],"split_indices":[7,3,0,3,3,10,9,3,7,8,0,0,0,0,8,3,2,0,0,0,0,2,2,5,0,7,5,4,7,0,0,2,2,5,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.9435562E2,1.8052312E2,1.1383251E2,1.7458775E2,5.9353623E0,2.1267054E0,1.117058E2,1.7199811E2,2.5896325E0,4.3923917E0,1.5429709E0,1.0248898E0,1.1018156E0,1.769308E1,9.4012726E1,1.5089854E2,2.109957E1,1.0019351E0,1.5876974E0,2.7341394E0,1.6582521E0,5.0669403E0,1.262614E1,9.250724E1,1.5054873E0,1.4740535E2,3.4931965E0,1.3316679E1,7.7828903E0,1.2092615E0,3.857679E0,2.9575357E0,9.668604E0,6.647176E1,2.6035471E1,1.3166498E2,1.5740377E1,1.582055E0,1.9111416E0,8.886661E0,4.4300184E0,1.7024258E0,6.0804644E0,1.1618315E0,1.7957041E0,5.8877935E0,3.7808106E0,5.4285603E1,1.2186163E1,8.136321E0,1.7899149E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"51","size_leaf_vector":"1"}},{"base_weights":[-3.7087934E-4,-1.2836921E-2,7.252004E-2,6.823361E-4,-1.5306093E-1,2.3571317E-1,-2.5469525E-2,-3.687834E-2,5.4947775E-2,-3.6976352E-1,1.2013784E-1,3.0309486E-1,-9.8323636E-2,-1.008688E-1,4.701204E-1,-6.767279E-3,-2.0188142E-1,1.7417636E-2,3.287593E-1,-5.2931666E-1,6.235939E-2,1.4716613E-1,-1.2710563E-2,3.9521366E-1,-1.8982516E-1,1.3409177E-2,-4.818696E-1,2.1770392E-2,2.1714862E-1,1.0657902E-2,-4.7131678E-1,-3.5874856E-1,2.3629309E-1,-3.0369228E-2,2.0596033E-1,-1.0206455E-1,4.7028172E-1,-6.424108E-1,5.1944926E-2,1.3218136E-1,-8.544967E-2,-3.2418716E-1,2.0594662E-1,6.272647E-1,7.7344276E-2,-6.949619E-2,-1.6435621E-2,9.833017E-2,-4.448974E-1,-2.186775E-1,9.811204E-2,-3.530529E-3,9.818189E-2,-2.1069492E-1,5.0349236E-3,-4.238026E-2,-2.206678E-1,1.2189711E-1,-7.80154E-2,2.1536713E-2,-4.913954E-2,2.1420749E-1,2.3477402E-2,-9.686228E-4,2.26523E-1,-5.7502657E-2,-2.1461959E-1,-3.6591364E-3,-1.7113529E-1,1.9534044E-1,-2.378227E-2,9.822354E-2,2.507196E-1,1.3902153E-1,-9.121918E-2,-4.8487026E-2,1.1828836E-1,-1.7457548E-1,-2.8944653E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":73,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,-1,25,27,29,31,33,35,37,39,-1,41,43,45,47,49,-1,-1,51,53,55,57,59,61,-1,63,65,-1,-1,-1,67,69,71,73,-1,-1,75,77,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.6669005E-1,4.751887E-1,6.914662E-1,4.699403E-1,1.3493383E0,7.136188E-1,1.0819482E0,6.7564493E-1,9.633585E-1,8.97382E-1,5.2315575E-1,7.5480354E-1,0E0,1.0885599E0,3.3403158E-1,9.462027E-1,1.5278058E0,7.63013E-1,1.2309365E0,8.470135E-1,6.8695056E-1,0E0,6.5972704E-1,9.058161E-1,4.4396743E-3,8.257033E-1,1.3898008E0,0E0,0E0,8.0651575E-1,4.8823422E-1,1.1955321E0,6.2362736E-1,9.370257E-1,1.0543716E0,0E0,1.3181152E0,1.322968E-1,0E0,0E0,0E0,2.9879317E-1,7.7880174E-1,1.8505883E-1,1.1127508E0,0E0,0E0,1.4308681E0,1.0244048E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,22,22,23,23,24,24,25,25,26,26,29,29,30,30,31,31,32,32,33,33,34,34,36,36,37,37,41,41,42,42,43,43,44,44,47,47,48,48],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,-1,26,28,30,32,34,36,38,40,-1,42,44,46,48,50,-1,-1,52,54,56,58,60,62,-1,64,66,-1,-1,-1,68,70,72,74,-1,-1,76,78,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.066704E0,9.4228196E-1,1.3154992E0,-3.874429E-1,1.0127703E0,5.414116E-1,3.902867E-1,2.753794E-1,3.968685E-1,1.3846228E0,-3.2438374E-1,9.282538E-1,-9.8323636E-2,-5.5560064E-1,1.9309677E-1,-4.5050204E-1,6.9351125E-1,5.53022E-1,5.9381795E-1,1.4176449E-1,-3.445578E-1,1.4716613E-1,1.0349928E0,4.6804446E-1,2.8468606E-1,2.345772E-1,9.845981E-1,2.1770392E-2,2.1714862E-1,1.8732673E0,-1.7727183E-1,1.9592093E-1,1.2224419E0,7.8226164E-2,-6.7923063E-1,-1.0206455E-1,-5.9413753E-2,-3.9751104E-1,5.1944926E-2,1.3218136E-1,-8.544967E-2,-6.35789E-1,1.1818031E0,1.3264844E-1,-8.993449E-1,-6.949619E-2,-1.6435621E-2,-7.8681755E-1,2.5842196E-1,-2.186775E-1,9.811204E-2,-3.530529E-3,9.818189E-2,-2.1069492E-1,5.0349236E-3,-4.238026E-2,-2.206678E-1,1.2189711E-1,-7.80154E-2,2.1536713E-2,-4.913954E-2,2.1420749E-1,2.3477402E-2,-9.686228E-4,2.26523E-1,-5.7502657E-2,-2.1461959E-1,-3.6591364E-3,-1.7113529E-1,1.9534044E-1,-2.378227E-2,9.822354E-2,2.507196E-1,1.3902153E-1,-9.121918E-2,-4.8487026E-2,1.1828836E-1,-1.7457548E-1,-2.8944653E-2],"split_indices":[8,6,8,3,5,4,3,1,1,6,3,5,0,3,9,3,8,6,10,4,9,0,7,10,2,7,5,0,0,9,8,8,0,2,7,0,2,0,0,0,0,1,5,10,7,0,0,3,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.9147324E2,2.496083E2,4.1864933E1,2.2856688E2,2.1041424E1,1.5179424E1,2.6685509E1,1.3526292E2,9.330395E1,1.1536866E1,9.504559E0,1.3970796E1,1.2086285E0,2.3852974E1,2.832535E0,1.1526878E2,1.999414E1,8.299219E1,1.0311762E1,8.261531E0,3.2753348E0,1.7981805E0,7.706378E0,1.1937997E1,2.0327995E0,1.909146E1,4.761516E0,1.6044478E0,1.2280872E0,1.1204312E2,3.2256627E0,1.4856689E1,5.137451E0,6.68801E1,1.611209E1,1.5554073E0,8.756354E0,7.1895385E0,1.0719928E0,1.4387966E0,1.8365382E0,2.9794836E0,4.7268944E0,6.3378778E0,5.6001186E0,1.0058008E0,1.0269986E0,1.6769615E1,2.321843E0,3.7232492E0,1.0382667E0,1.0553103E2,6.5120945E0,1.8915727E0,1.3340899E0,1.0291642E1,4.5650473E0,3.9616823E0,1.1757689E0,3.8140396E1,2.873
9702E1,2.3141007E0,1.3797988E1,3.6599126E0,5.0964417E0,1.5073079E0,5.6822305E0,1.7773273E0,1.2021563E0,1.3448559E0,3.3820384E0,3.6534433E0,2.6844344E0,2.676244E0,2.9238749E0,9.169608E0,7.6000075E0,1.1854547E0,1.1363882E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"79","size_leaf_vector":"1"}},{"base_weights":[-9.899131E-5,-2.3654215E-1,3.5885216E-3,2.3108851E-2,-1.2545824E-1,3.023925E-1,-2.07812E-4,1.8529645E-1,-4.7631394E-2,-1.8681112E-1,5.496084E-3,-1.564947E-1,-8.453777E-3,1.4575553E-1,-4.04167E-3,1.8322848E-1,-2.6527247E-1,-3.0112562E-1,2.3194772E-1,1.6809935E-3,-3.5270676E-1,1.0959313E-1,-5.1887654E-2,-1.790177E-1,5.0800562E-2,-1.587105E-1,2.17176E-2,1.2475351E-1,-9.498466E-3,-1.7896981E-3,1.257474E-1,-1.746997E-1,5.6175087E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":74,"left_children":[1,3,5,-1,-1,7,9,-1,-1,11,13,-1,15,17,19,21,23,25,27,29,31,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.5234395E-1,2.8537312E-1,3.2336065E-1,0E0,0E0,6.1801124E-1,3.0163392E-1,0E0,0E0,4.9757323E-1,3.6850375E-1,0E0,3.686193E-1,7.45368E-1,5.16346E-1,3.429609E-1,5.601165E-1,3.167366E-1,7.532736E-1,8.175148E-1,6.271584E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,5,5,6,6,9,9,10,10,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20],"right_children":[2,4,6,-1,-1,8,10,-1,-1,12,14,-1,16,18,20,22,24,26,28,30,32,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.4382055E0,1.9885693E-1,-1.8179249E0,2.3108851E-2,-1.2545824E-1,3.825173E-1,-1.3255169E0,1.8529645E-1,-4.7631394E-2,7.664258E-1,-1.4286649E0,-1.564947E-1,4.5761305E-1,-3.9281076E-1,1.1694285E0,1.5234454E0,1.0346863E0,1.2995825E0,6.3984674E-1,9.981511E-1,9.2987575E-2,1.0959313E-1,-5.1887654E-2,-1.790177E-1,5.0800562E-2,-1.587105E-1,2.17176E-2,1.2475351E-1,-9.498466E-3,-1.7896981E-3,1.257474E-1,-1.746997E-1,5.6175087E-2],"split_indices":[5,4,6,0,0,8,5,0,0,2,6,0,1,1,4,2,1,0,1,4,6,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.8744913E2,3.4444485E0,2.8400467E2,1.4747143E0,1.9697343E0,2.5762706E0,2.814284E2,1.3286413E0,1.2476294E0,7.405501E0,2.740229E2,1.9047992E0,5.5007014E0,1.6537994E1,2.574849E2,3.3138685E0,2.1868331E0,2.2708182E0,1.4267176E1,2.5430614E2,3.1787648E0,2.1759272E0,1.1379412E0,1.0290259E0,1.1578071E0,1.1517162E0,1.1191019E0,8.06407E0,6.203107E0,2.5069995E2,3.6061976E0,2.1752326E0,1.0035323E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"33","size_leaf_vector":"1"}},{"base_weights":[-9.7367645E-4,1.4857062E-2,-5.663769E-2,7.958141E-3,3.6322567E-1,-2.064716E-1,1.7082501E-2,3.2217163E-3,1.1035262E-1,1.7852873E-1,6.9326237E-3,-3.9728326E-1,4.8165873E-2,-2.5225466E-1,5.3567216E-2,1.2941955E-2,-2.2525428E-1,-5.880591E-1,2.747436E-1,4.9930578E-1,-2.4288964E-1,8.555567E-2,-4.4619596E-1,9.389071E-2,-2.7816376E-1,-2.924764E-3,1.8044315E-1,1.2378306E-1,-4.1504934E-1,1.5908627E-2,-2.042663E-1,1.7392313E-3,1.1643455E-1,8.0241877E-1,-5.8555067E-2,-4.461301E-1,5.8407903E-2,-1.7323618E-1,-4.630539E-3,4.003201E-2,4.7112188E-1,7.269706E-2,-1.9883668E-1,-1.0329042E-1,2.0818068E-3,9.787196E-2,-1.1053481E-1,-1.562056E-1,3.938991E-2,6.845581E-2,2.953651E-1,-2.5679482E-2,-2.101347E-1,7.388023E-2,-2.4306376E-2,1.8443091E-1,1.1685669E-2,-1.1066563E-1,1.3970658E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":75,"left_children":[1,3,5,7,9,11,13,15,-1,-1,-1,17,19,21,23,25,27,29,31,33,35,-1,37,39,41,43,45,-1,47,-1,-1,-1,-1,49,-1,51,-1,-1,-1,53,55,57,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.541115E-1,5.368231E-1,7.0878416E-1,3.7591055E-1,3.2838923E-1,1.045863E0,4.431073E-1,4.900839E-1,0E0,0E0,0E0,1.7241105E0,1.4269469E0,6.8912476E-1,5.5217004E-1,5.622481E-1,1.2615516E0,6.1998916E-1,1.0069975E-1,1.0463794E0,6.842271E-1,0E0,2.3710269E-1,7.0873713E-1,6.5742606E-1,6.5583515E-1,1.5850506E0,0E0,4.9771905E-1,0E0,0E0,0E0,0E0,1.0354209E-1,0E0,3.8844872E-1,0E0,0E0,0E0,8.2959735E-1,2.4804789E-1,7.3788095E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,22,22,23,23,24,24,25,25,26,26,28,28,33,33,35,35,39,39,40,40,41,41],"right_children":[2,4,6,8,10,12,14,16,-1,-1,-1,18,20,22,24,26,28,30,32,34,36,-1,38,40,42,44,46,-1,48,-1,-1,-1,-1,50,-1,52,-1,-1,-1,54,56,58,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.24572E-1,1.7788109E-1,6.414551E-1,1.4538637E0,-1.7224395E-1,5.691137E-1,-2.432298E-1,1.6716468E0,1.1035262E-1,1.7852873E-1,6.9326237E-3,4.5761305E-1,-3.3206618E-1,-4.2915997E-1,2.2765083E0,-2.9512849E-2,-1.0146359E0,-1.8002312E0,-5.3458095E-1,4.346811E-1,-5.5560064E-1,8.555567E-2,8.352457E-1,1.8732673E0,-8.908796E-2,-4.5429462E-1,5.599659E-1,1.2378306E-1,1.9914937E-1,1.5908627E-2,-2.042663E-1,1.7392313E-3,1.1643455E-1,-7.8765035E-1,-5.8555067E-2,-1.4817458E0,5.8407903E-2,-1.7323618E-1,-4.630539E-3,1.4704613E-1,5.069149E-1,2.5338323E0,-1.9883668E-1,-1.0329042E-1,2.0818068E-3,9.787196E-2,-1.1053481E-1,-1.562056E-1,3.938991E-2,6.845581E-2,2.953651E-1,-2.5679482E-2,-2.101347E-1,7.388023E-2,-2.4306376E-2,1.8443091E-1,1.1685669E-2,-1.1066563E-1,1.3970658E-1],"split_indices":[7,7,7,0,4,8,0,9,0,0,0,1,1,4,9,4,0,6,3,7,3,0,2,9,1,10,2,0,6,0,0,0,0,1,0,6,0,0,0,2,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.864141E2,2.2356557E2,6.2848526E1,2.2022714E2,3.3384266E0,2.0131386E1,4.271714E1,2.1836223E2,1.8649088E0,1.5393296E0,1.799097E0,1.1187718E1,8.943667E0,4.390637E0,3.8326504E1,2.1035623E2,8.006E0,8.811301E0,2.3764174E0,3.2267156E0,5.716952E0,1.0395143E0,3.3511224E0,3.4811916E1,3.5145886E0,1.9305176E2,1.7304482E1,1.566688E0,6.439312E0,1.1687158E0,7.6425858E0,1.0163987E0,1.3600186E0,2.1381166E0,1.088599E0,3.8904521E0,1.8264999E0,2.307466E0,1.0436563E0,3.1430576E1,3.3813398E0,2.262079E0,1.2525096E0,4.469437E0,1.8858232E2,1.399324E1,3.3112414E0,5.435364E0,1.0039482E0,1.0573422E0,1.0807743E0,2.1620412E0,1.7284108E0,1.1242551E1,2.0188025E1,2.2206905E0,1.1606494E0,1.094072E0,1.168007E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"59","size_leaf_vector":"1"}},{"base_weights":[-6.0298725E-4,2.1589853E-1,-4.7800587E-3,1.7235772E-1,-8.1094686E-4,-4.0108003E-2,2.2189027E-2,-9.1320455E-2,1.9448629E-1,9.822689E-2,-8.6050674E-2,-4.0642746E-2,7.5886525E-2,1.0951794E-1,-2.2231504E-2,3.510198E-1,-2.4644563E-2,1.6112967E-1,-1.18846565E-1,6.0017216E-3,-3.310771E-1,1.3966328E-1,-1.741877E-1,-2.653373E-1,5.7705677E-1,-3.8784307E-1,1.6657445E-1,3.5063568E-1,-4.1381526E-1,-3.6636037E-1,-7.077976E-2,4.4791606E-1,-2.7408887E-2,6.984904E-2,-7.6131934E-1,2.5526391E-2,2.8118628E-1,-6.
799473E-1,2.4439001E-2,7.624361E-2,-1.9928345E-1,-8.113335E-2,2.2896564E-1,4.3304056E-2,-1.6398981E-1,-4.1272867E-2,1.4859283E-1,-2.9467182E-2,1.7692141E-1,-1.7699331E-1,-3.972109E-3,-1.7299636E-1,6.547374E-2,4.286261E-3,-7.787584E-2,-4.6196193E-2,2.2562338E-1,-7.857354E-2,5.2432334E-3,-6.398779E-2,1.7970116E-1,-2.5817195E-1,-5.7476893E-2,6.952046E-2,-4.751733E-2,-8.761904E-2,1.0130974E-1,-2.2618231E-1,-6.0094323E-2,1.1648286E-1,-4.613359E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":76,"left_children":[1,3,5,-1,7,9,11,-1,13,15,17,19,21,-1,-1,23,25,27,29,31,33,35,37,39,41,43,45,47,49,51,53,55,57,59,61,63,65,67,69,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.588366E-1,4.2151278E-1,2.6828888E-1,0E0,3.1863216E-1,7.829022E-1,5.422381E-1,0E0,1.7699726E-1,9.66792E-1,7.6501644E-1,1.0083865E0,1.4041064E0,0E0,0E0,1.5679022E0,1.5711286E0,1.3656387E0,9.4733477E-1,9.7184265E-1,1.8294917E0,1.1152523E0,1.8273534E0,8.754956E-1,1.4283736E0,7.365371E-1,1.558919E0,1.0202427E0,2.2051823E-1,1.7318983E0,1.1238612E0,9.6269727E-1,6.505039E-1,1.0636414E0,1.2076378E-1,1.5372313E0,1.1003826E0,3.059268E-2,9.614989E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,5,5,6,6,8,8,9,9,10,10,11,11,12,12,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,38,38],"right_children":[2,4,6,-1,8,10,12,-1,14,16,18,20,22,-1,-1,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.2736068E0,-4.732087E-1,-6.6069925E-1,1.7235772E-1,-7.8681755E-1,-8.4839493E-1,5.09969E-1,-9.1320455E-2,-9.087947E-3,-4.3782142E-1,-1.0146359E0,1.5389027E-1,4.4932756E-1,1.0951794E-1,-2.2231504E-2,-1.1217455E0,-3.0166942E-1,-1.31742265E-2,-8.708964E-1,-8.1596833E-1,-1.0924859E0,-1.6608919E-1,-6.126634E-1,-6.126634E-1,-8.270814E-1,-5.461783E-1,5.868313E-2,-6.787934E-1,1.9577834E-1,6.9351125E-1,6.9351125E-1,-5.7189107E-1,-1.0164239E-1,-1.2163413E0,6.2878585E-1,1.3903836E0,-1.5456452E0,5.794642E-1,-1.1507303E-1,7.624361E-2,-1.9928345E-1,-8.113335E-2,2.2896564E-1,4.3304056E-2,-1.6398981E-1,-4.1272867E-2,1.4859283E-1,-2.9467182E-2,1.7692141E-1,-1.7699331E-1,-3.972109E-3,-1.7299636E-1,6.547374E-2,4.286261E-3,-7.787584E-2,-4.6196193E-2,2.2562338E-1,-7.857354E-2,5.2432334E-3,-6.398779E-2,1.7970116E-1,-2.5817195E-1,-5.7476893E-2,6.952046E-2,-4.751733E-2,-8.761904E-2,1.0130974E-1,-2.2618231E-1,-6.0094323E-2,1.1648286E-1,-4.613359E-2],"split_indices":[1,6,3,0,3,1,10,0,3,6,0,1,5,0,0,1,8,6,3,7,6,0,8,8,6,9,9,2,5,8,8,4,6,6,2,10,8,3,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.8407413E2,4.4176307E0,2.796565E2,1.0420442E0,3.3755865E0,1.2085553E2,1.5880096E2,1.1027297E0,2.272857E0,2.984705E1,9.1008484E1,7.328861E1,8.5512344E1,1.1704059E0,1.102451E0,9.154986E0,2.0692064E1,1.0202189E1,8.080629E1,6.3990864E1,9.297752E0,6.848742E1,1.7024921E1,2.4098759E0,6.7451105E0,6.7820587E0
,1.3910006E1,7.9665103E0,2.2356791E0,1.2063736E1,6.874255E1,3.6258736E0,6.036499E1,5.2464447E0,4.051308E0,3.8565277E1,2.992214E1,4.1174617E0,1.290746E1,1.2044481E0,1.2054276E0,1.1332519E0,5.611859E0,1.5794437E0,5.202615E0,7.527211E0,6.3827944E0,2.9735506E0,4.9929595E0,1.2243948E0,1.0112844E0,8.882229E0,3.1815076E0,4.8027805E1,2.0714752E1,1.3829324E0,2.242941E0,8.921379E0,5.144361E1,3.8065455E0,1.4398992E0,3.015481E0,1.035827E0,1.80583E1,2.0506979E1,2.3110847E0,2.7611055E1,3.071677E0,1.0457847E0,3.8562124E0,9.051249E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"71","size_leaf_vector":"1"}},{"base_weights":[-2.252259E-3,-1.1027869E-2,1.0172513E-1,-3.399548E-1,-7.013864E-3,3.8056538E-1,5.1629986E-3,-1.7572659E-1,2.4733078E-2,-1.491589E-3,-2.3457676E-1,5.9204084E-1,-6.528615E-2,-1.5960228E-1,1.7019033E-1,2.1390845E-1,-1.320396E-2,4.8626177E-2,-4.4077426E-1,2.319344E-1,7.1639985E-2,9.982219E-2,-3.2123116E-1,-1.4780335E-1,3.7428528E-1,3.7188244E-1,-3.0219892E-1,-3.0575186E-1,-5.94395E-3,-2.2855851E-1,-1.1128804E-1,-4.719958E-1,7.425337E-2,7.778104E-2,-1.5101252E-1,5.277413E-1,8.810506E-3,-5.7775304E-2,1.6105981E-1,-1.5773508E-1,2.4293775E-2,-1.8355282E-1,1.63854E-2,-1.989157E-2,6.8866606E-3,-1.2581347E-1,7.795316E-2,-1.8374452E-1,2.854802E-2,1.8636595E-1,4.3370135E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":77,"left_children":[1,3,5,7,9,11,13,-1,-1,15,17,19,-1,21,23,25,27,-1,29,-1,-1,-1,31,33,35,37,39,41,43,-1,45,47,-1,-1,-1,49,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.579274E-1,3.4147707E-1,5.9046173E-1,3.7210765E-1,3.2427204E-1,8.430431E-1,4.9910524E-1,0E0,0E0,6.4162505E-1,5.714941E-1,9.937155E-2,0E0,8.3909726E-1,6.4951485E-1,1.1817664E0,5.1023436E-1,0E0,3.692301E-1,0E0,0E0,0E0,7.544339E-1,7.523198E-1,2.9464066E-1,1.0803645E0,3.5318664E-1,6.75211E-1,4.1367716E-1,0E0,4.850575E-1,5.5402637E-1,0E0,0E0,0E0,5.333495E-2,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,9,9,10,10,11,11,13,13,14,14,15,15,16,16,18,18,22,22,23,23,24,24,25,25,26,26,27,27,28,28,30,30,31,31,35,35],"right_children":[2,4,6,8,10,12,14,-1,-1,16,18,20,-1,22,24,26,28,-1,30,-1,-1,-1,32,34,36,38,40,42,44,-1,46,48,-1,-1,-1,50,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.9354022E0,-1.7998126E0,2.019251E0,-5.9413753E-2,1.8515531E0,3.9471725E-1,-3.445578E-1,-1.7572659E-1,2.4733078E-2,-1.5897034E0,-6.186598E-1,-6.86076E-1,-6.528615E-2,-3.8474053E-1,-5.262896E-1,-7.447781E-1,-9.1293585E-1,4.8626177E-2,-5.7662034E-1,2.319344E-1,7.1639985E-2,9.982219E-2,3.0531117E-1,-1.039405E0,-2.6132458E-1,-5.6167686E-1,-8.2178444E-1,4.447161E-1,-4.26067E-1,-2.2855851E-1,-1.4228727E-1,2.522345E0,7.425337E-2,7.778104E-2,-1.5101252E-1,1.4606847E-1,8.810506E-3,-5.7775304E-2,1.6105981E-1,-1.5773508E-1,2.4293775E-2,-1.8355282E-1,1.63854E-2,-1.989157E-2,6.8866606E-3,-1.2581347E-1,7.795316E-2,-1.8374452E-1,2.854802E-2,1.8636595E-1,4.3370135E-2],"split_indices":[10,7,10,2,10,9,9,0,0,7,3,4,0,6,6,3,3,0,3,0,0,0,6,6,3,6,0,8,8,0,5,10,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.8044156E2,2.5943903E2,2.1002512E1,2.118841E0,2.573202E2,4.645799E0,1.6356712E1,1.0949408E0,1.0239003E0,2.5220645E2,5.1137457E0,3.4399524E0,1.2058469E0,8.201304E0,8.155408E0,1.2
1162E1,2.4009026E2,1.8222293E0,3.2915165E0,1.4883704E0,1.9515818E0,1.765299E0,6.4360056E0,3.2959328E0,4.8594747E0,9.490672E0,2.625528E0,4.818304E0,2.3527194E2,1.0023983E0,2.2891183E0,5.2218747E0,1.2141308E0,1.6629516E0,1.6329812E0,2.9963315E0,1.8631433E0,2.1087167E0,7.381955E0,1.4230264E0,1.2025017E0,2.2280364E0,2.5902677E0,7.575369E1,1.5951825E2,1.1797591E0,1.109359E0,4.1211405E0,1.1007344E0,1.9092966E0,1.087035E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"51","size_leaf_vector":"1"}},{"base_weights":[-2.2946293E-3,-1.0229314E-2,9.522162E-2,-8.421944E-2,1.0443461E-2,3.563092E-1,2.2401353E-2,-1.7135286E-1,1.3683696E-1,1.7705707E-1,-1.501909E-2,-1.6156863E-2,1.697335E-1,-8.6086646E-2,1.722493E-1,-2.2380893E-1,2.1525337E-1,5.289757E-1,1.5628738E-2,6.32883E-2,5.499491E-1,-1.1648089E-1,1.769666E-2,-3.373208E-1,1.05444275E-1,4.814618E-1,-8.219934E-2,-7.572882E-2,-4.2854914E-1,4.5563218E-1,-9.439448E-2,-4.2281035E-2,3.0167302E-1,1.4851527E-1,-4.0932158E-1,-1.6174917E-1,1.4872895E-1,-9.697756E-3,6.684561E-1,-3.932409E-1,1.3464332E-2,1.8039502E-1,-2.0730525E-2,6.807596E-2,-5.631552E-1,3.0013403E-1,-7.8342445E-2,3.710132E-2,1.9008456E-1,-1.2863691E-1,1.7788865E-1,-1.03147276E-1,2.1014348E-2,6.5195225E-2,-1.6347456E-1,1.9862308E-1,2.2378167E-2,-2.7843086E-2,9.965195E-2,-1.8821031E-1,1.861064E-2,-8.1029445E-2,8.511257E-2,4.5682747E-2,2.3149687E-1,-2.3336163E-1,-1.2402962E-2,2.632829E-2,-9.74443E-2,1.1696319E-1,-4.251094E-2,-2.502808E-2,2.6284995E-2,-1.841768E-2,-2.1748935E-1,1.5780684E-1,-1.46223465E-2,1.7900462E-1,-8.1851155E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":78,"left_children":[1,3,5,7,9,11,13,15,17,19,21,-1,-1,23,25,27,29,31,33,35,37,39,41,43,45,47,49,51,53,55,-1,-1,-1,57,59,-1,61,-1,63,65,67,69,71,-1,73,75,-1,-1,-1,-1,77,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.1735065E-1,3.9781195E-1,3.93919E-1,1.1171405E0,8.669326E-1,4.2078853E-1,3.0087873E-1,8.831359E-1,7.811973E-1,1.1142182E0,5.917118E-1,0E0,0E0,5.587794E-1,6.471796E-1,1.0815272E0,8.3485293E-1,1.384148E0,8.3546734E-1,1.2065921E0,4.495443E-1,1.56771E0,8.489746E-1,7.5748533E-1,5.73086E-1,1.1936146E-1,5.351656E-1,9.0486103E-1,1.2613051E0,2.7651942E-1,0E0,0E0,0E0,5.265258E-1,4.0215373E-1,0E0,1.2043955E0,0E0,1.594472E-1,1.8510938E0,7.958262E-1,1.8050616E0,7.5630486E-1,0E0,2.6534367E-1,4.1908053E-1,0E0,0E0,0E0,0E0,8.451704E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,13,13,14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,33,33,34,34,36,36,38,38,39,39,40,40,41,41,42,42,44,44,45,45,50,50],"right_children":[2,4,6,8,10,12,14,16,18,20,22,-1,-1,24,26,28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,-1,-1,-1,58,60,-1,62,-1,64,66,68,70,72,-1,74,76,-1,-1,-1,-1,78,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.9354022E0,-6.289329E-1,-4.5355532E-1,-1.1418654E-1,-1.2662021E0,-8.4839493E-1,-1.3702966E-1,9.995786E-1,1.1931772E-2,-8.908796E-2,-5.4334486E-1,-1.6156863E-2,1.697335E-1,-6.004373E-1,1.10681206E-1,9.406206E-3,1.5020956E0,-7.826177E-1,-4.0061373E-1,-1.7131008E0,-7.6207423E-1,-2.432298E-1,-3.2037044E-1,-8.
1500554E-1,-3.113201E-1,-4.7897142E-1,-1.1411514E-1,4.447161E-1,-4.26067E-1,6.4971733E-1,-9.439448E-2,-4.2281035E-2,3.0167302E-1,-1.0146359E0,7.8226164E-2,-1.6174917E-1,-4.803529E-1,-9.697756E-3,-7.992597E-1,-5.9413753E-2,6.1836034E-1,-1.6673584E-1,2.521226E-1,6.807596E-2,-1.6897553E0,-8.6605296E-2,-7.8342445E-2,3.710132E-2,1.9008456E-1,-1.2863691E-1,-3.2037044E-1,-1.03147276E-1,2.1014348E-2,6.5195225E-2,-1.6347456E-1,1.9862308E-1,2.2378167E-2,-2.7843086E-2,9.965195E-2,-1.8821031E-1,1.861064E-2,-8.1029445E-2,8.511257E-2,4.5682747E-2,2.3149687E-1,-2.3336163E-1,-1.2402962E-2,2.632829E-2,-9.74443E-2,1.1696319E-1,-4.251094E-2,-2.502808E-2,2.6284995E-2,-1.841768E-2,-2.1748935E-1,1.5780684E-1,-1.46223465E-2,1.7900462E-1,-8.1851155E-2],"split_indices":[10,0,1,3,7,1,6,9,3,1,4,0,0,4,6,2,8,7,4,7,5,0,0,9,5,9,5,8,8,10,0,0,0,0,2,0,5,0,8,2,5,3,5,0,7,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.786428E2,2.5850397E2,2.0138805E1,5.57816E1,2.0272238E2,3.542966E0,1.6595839E1,4.017194E1,1.5609659E1,2.6084625E1,1.7663776E2,1.4488456E0,2.0941203E0,9.873243E0,6.722595E0,3.5743263E1,4.4286757E0,2.8913224E0,1.2718337E1,2.0882982E1,5.201642E0,4.2444298E1,1.3419347E2,3.9416876E0,5.931556E0,2.6319752E0,4.09062E0,2.1536608E1,1.4206656E1,3.1430526E0,1.285623E0,1.4946805E0,1.3966419E0,1.01841545E1,2.534183E0,1.9342147E0,1.8948769E1,1.0026114E0,4.1990304E0,1.2913867E1,2.9530428E1,2.4933228E1,1.0926023E2,1.1253147E0,2.8163729E0,3.99247E0,1.939086E0,1.326201E0,1.3057743E0,1.475608E0,2.6150122E0,7.107237E0,1.4429371E1,2.036414E0,1.2170243E1,1.5601892E0,1.5828632E0,4.614881E0,5.5692735E0,1.5063672E0,1.0278158E0,4.3744125E0,1.4574356E1,1.1121057E0,3.0869246E0,5.5916147E0,7.3222528E0,2.4819788E1,4.71064E0,1.4980175E1,9.9530525E0,6.9356346E1,3.9903885E1,1.023124E0,1.7932488E0,2.1151745E0,1.8772955E0,1.1876646E0,1.4273475E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"79","size_leaf_vector":"1"}},{"base_weights":[-1.1471965E-3,1.9637805E-3,-2.622526E-1,8.3189756E-2,-9.0525026E-4,-1.3402323E-1,2.2866644E-2,1.016963E-1,-8.228223E-3,-1.555658E-1,2.8513306E-1,2.2867726E-1,-1.4090583E-2,-3.3491623E-1,1.4759585E-1,2.0764779E-1,1.1256218E-1,-1.2656319E-1,4.4052583E-1,-6.1724637E-2,1.3465417E-2,-1.6023374E-1,1.4929344E-2,1.4919943E-1,-5.5365592E-2,1.4030667E-1,-1.3993961E-2,-9.804146E-2,3.93125E-2,-9.256048E-3,1.9344191E-1,-7.6025473E-3,-8.185586E-2,-1.0992981E-1,8.131321E-3],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":79,"left_children":[1,3,5,-1,7,-1,-1,9,11,13,15,17,19,21,23,-1,25,27,29,31,33,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.2468297E-1,2.1646906E-1,2.3742457E-1,0E0,2.051183E-1,0E0,0E0,9.026467E-1,3.5695484E-1,4.9622786E-1,6.926378E-1,5.2299E-1,3.2831356E-1,4.456405E-1,5.272254E-1,0E0,5.2490425E-1,2.0253438E-1,4.1588956E-1,6.9680965E-1,8.3314145E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,16,16,17,17,18,18,19,19,20,20],"right_children":[2,4,6,-1,8,-1,-1,10,12,14,16,18,20,22,24,-1,26,28,30,32,34,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[2.188833E0,-1.3647237E0,2.94814E0,8.3189756E-2,-1.6897553E0,-1.3402323E-1,2.2866644E-2,-6.86076E-1,
-9.42992E-1,5.069149E-1,-1.016626E0,-4.8826578E-1,-7.0273864E-1,-6.805919E-1,-2.2549783E-1,2.0764779E-1,-2.6587364E-1,-3.9281076E-1,-4.2400864E-1,4.4686025E-1,-1.519667E0,-1.6023374E-1,1.4929344E-2,1.4919943E-1,-5.5365592E-2,1.4030667E-1,-1.3993961E-2,-9.804146E-2,3.93125E-2,-9.256048E-3,1.9344191E-1,-7.6025473E-3,-8.185586E-2,-1.0992981E-1,8.131321E-3],"split_indices":[1,1,1,0,7,0,0,4,4,8,9,8,3,9,6,0,2,1,5,6,7,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.7496936E2,2.7271255E2,2.256793E0,1.8259617E0,2.708866E2,1.2536145E0,1.0031785E0,1.7187447E1,2.5369916E2,7.217356E0,9.97009E0,5.2085195E0,2.4849065E2,4.4688435E0,2.7485127E0,2.0720584E0,7.8980317E0,2.0961485E0,3.112371E0,9.061357E1,1.5787708E2,2.6883168E0,1.7805268E0,1.0965642E0,1.6519486E0,1.8450124E0,6.053019E0,1.0282441E0,1.0679044E0,1.1976793E0,1.9146917E0,7.8249245E1,1.2364323E1,4.575145E0,1.5330193E2],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"35","size_leaf_vector":"1"}},{"base_weights":[-1.16451556E-4,-5.7787903E-2,1.1515535E-2,-2.827765E-1,1.1053033E-2,1.6301125E-2,-2.6122364E-1,-4.689174E-1,6.5998785E-2,4.9513496E-2,-1.6083471E-1,6.1003376E-2,-1.5707398E-2,-1.3160218E-1,2.1702513E-2,-6.559545E-1,9.512912E-3,-9.936599E-2,2.7056733E-1,-2.7982367E-2,2.3338832E-1,1.6465666E-2,1.7918335E-1,2.6703682E-2,-2.4339238E-1,-2.2678214E-1,-5.761442E-2,1.5747647E-1,-4.2464994E-2,1.8724956E-1,-1.8431446E-1,-8.057136E-2,3.5187495E-1,1.5521316E-1,-5.360204E-2,-2.3489264E-1,3.6415964E-1,-1.2303094E-2,3.0258462E-1,-6.459547E-1,-2.3130318E-2,-2.3312617E-2,1.552758E-1,-9.6550204E-2,6.1954655E-2,1.7318584E-1,-1.4426715E-2,-1.0725288E-1,6.993853E-2,-7.858755E-2,1.4557111E-2,-1.6878805E-1,1.089277E-1,1.9192694E-1,4.3516375E-2,5.8010273E-2,-2.0547334E-2,-1.3706191E-1,1.2839006E-1,-2.3779728E-1,-1.2886978E-2,1.645797E-1,-4.881554E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":80,"left_children":[1,3,5,7,9,11,13,15,17,19,-1,21,23,-1,-1,25,-1,-1,27,29,31,33,35,37,39,-1,-1,-1,-1,41,43,-1,45,47,49,51,53,55,57,59,61,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.8498711E-1,7.199783E-1,3.0378214E-1,7.373719E-1,7.925996E-1,3.2499793E-1,2.694397E-1,6.966771E-1,4.6853232E-1,5.0422E-1,0E0,4.9550262E-1,1.288532E0,0E0,0E0,1.0070562E-1,0E0,0E0,4.5463815E-1,8.9131486E-1,7.1678084E-1,6.876841E-1,2.0631588E0,1.2177991E0,1.8423246E0,0E0,0E0,0E0,0E0,1.02136E0,8.8632613E-1,0E0,8.299316E-1,1.0303438E0,1.0165458E0,1.8858845E0,1.0085549E0,1.1666121E0,1.5300331E0,6.1794925E-1,1.2524953E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,11,11,12,12,15,15,18,18,19,19,20,20,21,21,22,22,23,23,24,24,29,29,30,30,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40],"right_children":[2,4,6,8,10,12,14,16,18,20,-1,22,24,-1,-1,26,-1,-1,28,30,32,34,36,38,40,-1,-1,-1,-1,42,44,-1,46,48,50,52,54,56,58,60,62,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-3.3469358E-1,-9.3749535E-1,1.5690756E0,3.664962E-1,1.0683435E0,-1.7241693E-1,1.9363651E0,1.4398967E0,-2.7735096E-1,5.291309E-1,-1.6083471E-1,-3.8474053E-1,5.963377E-1,-1.3160218E-1,2.1702513E-2,3.6481115E-1,9.512912E-3,-9.936599E-2,-9.52706E-1,-9.660462E-1,-1.6422728E0,-3.63
8682E-1,-4.26067E-1,2.603036E-1,9.2987575E-2,-2.2678214E-1,-5.761442E-2,1.5747647E-1,-4.2464994E-2,1.2837484E-1,5.612582E-2,-8.057136E-2,3.4227094E-1,-1.9705367E-1,-8.523691E-3,1.3065345E0,-2.9627237E-1,-1.2823372E-1,-1.4228727E-1,9.423064E-1,1.2837484E-1,-2.3312617E-2,1.552758E-1,-9.6550204E-2,6.1954655E-2,1.7318584E-1,-1.4426715E-2,-1.0725288E-1,6.993853E-2,-7.858755E-2,1.4557111E-2,-1.6878805E-1,1.089277E-1,1.9192694E-1,4.3516375E-2,5.8010273E-2,-2.0547334E-2,-1.3706191E-1,1.2839006E-1,-2.3779728E-1,-1.2886978E-2,1.645797E-1,-4.881554E-2],"split_indices":[2,0,4,1,7,6,2,8,9,9,0,6,9,0,0,5,0,0,7,7,2,8,8,9,6,0,0,0,0,6,4,0,10,2,9,10,6,2,5,8,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.7376508E2,4.5283123E1,2.2848196E2,9.881229E0,3.5401894E1,2.2546608E2,3.0158844E0,6.218145E0,3.6630843E0,3.3926697E1,1.4751962E0,9.370054E1,1.3176553E2,1.7629325E0,1.252952E0,4.3010015E0,1.9171436E0,1.0347517E0,2.6283326E0,2.4463943E1,9.462753E0,6.88811E1,2.481944E1,1.11819534E2,1.9946005E1,3.0204842E0,1.2805173E0,1.4559919E0,1.1723408E0,1.0209767E1,1.4254177E1,1.5652044E0,7.8975487E0,2.270522E1,4.617588E1,7.5804596E0,1.7238981E1,9.880492E1,1.3014613E1,6.370424E0,1.3575582E1,6.089901E0,4.1198664E0,1.0674668E1,3.579509E0,4.767399E0,3.1301498E0,2.5218306E0,2.018339E1,1.467367E1,3.150221E1,4.9349303E0,2.6455295E0,6.7848883E0,1.0454093E1,2.067708E1,7.812784E1,1.4695437E0,1.154507E1,4.8708677E0,1.4995561E0,2.0890522E0,1.1486529E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"63","size_leaf_vector":"1"}},{"base_weights":[-2.1059965E-3,2.0644251E-2,-3.8082596E-2,5.570027E-3,2.6672983E-1,1.0900753E-1,-4.9018193E-2,2.3264524E-2,-1.8563627E-1,5.3377485E-1,-2.587601E-1,-2.1540543E-1,-1.6687544E-2,-2.6239146E-2,1.0129445E-1,1.06418416E-1,-2.811739E-1,1.76052E-1,2.2701089E-1,-1.3926189E-3,-1.4193182E-1,-5.833688E-1,-4.3811657E-2,-5.2370984E-2,9.304963E-2,-5.2780606E-2,4.1501197E-1,-2.9709554E-1,1.6458285E-1,7.524198E-2,-3.5953942E-1,1.5734836E-1,-7.992195E-2,-1.1502401E-3,-2.2101323E-1,1.0658272E-1,-1.6925141E-1,9.119207E-3,-2.4590285E-1,1.9506767E-1,-3.0286333E-1,-5.316563E-3,-1.4768729E-1,-2.404066E-2,1.6999643E-1,-1.2281664E-1,6.0505096E-2,3.5842247E-2,1.9761588E-1,-1.6147196E-1,-1.1492491E-2,-5.377551E-2,1.106724E-1,-1.5406455E-2,9.994455E-2,-1.8901332E-1,-1.3869935E-2,1.552687E-1,1.954349E-2,3.5889573E-2,-1.8643574E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":81,"left_children":[1,3,5,7,9,-1,11,13,15,17,19,21,23,25,27,-1,29,31,-1,-1,-1,33,35,37,39,41,43,45,47,-1,49,-1,-1,-1,-1,51,-1,53,55,57,59,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.2291847E-1,6.1767113E-1,4.809047E-1,5.398821E-1,1.4870654E0,0E0,5.535619E-1,5.657593E-1,7.976868E-1,4.01098E-1,2.1091497E-1,1.033102E0,3.4870106E-1,1.0757711E0,1.4807863E0,0E0,5.923669E-1,7.372336E-1,0E0,0E0,0E0,4.6382463E-1,1.0396003E0,7.983663E-1,9.4386286E-1,1.3128403E0,4.372375E-1,5.3857106E-1,1.0279726E0,0E0,6.0568476E-1,0E0,0E0,0E0,0E0,8.572266E-1,0E0,1.0308369E0,1.2108538E0,7.2155976E-1,7.3119414E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,16,16,17,17,21,21,22,
22,23,23,24,24,25,25,26,26,27,27,28,28,30,30,35,35,37,37,38,38,39,39,40,40],"right_children":[2,4,6,8,10,-1,12,14,16,18,20,22,24,26,28,-1,30,32,-1,-1,-1,34,36,38,40,42,44,46,48,-1,50,-1,-1,-1,-1,52,-1,54,56,58,60,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-3.5239425E-1,-4.324358E-1,-1.0917766E0,6.5918386E-1,1.9309677E-1,1.0900753E-1,-6.805919E-1,3.9906673E-2,-1.3329034E0,1.3943407E-1,2.1684301E-1,-1.6608919E-1,1.0839374E0,1.1793455E0,-9.3749535E-1,1.06418416E-1,-1.2346513E0,-2.9627237E-1,2.2701089E-1,-1.3926189E-3,-1.4193182E-1,-3.3469358E-1,6.4971733E-1,3.5839206E-1,-5.805907E-2,6.2878585E-1,-8.708964E-1,5.53022E-1,-5.805907E-2,7.524198E-2,8.7190956E-1,1.5734836E-1,-7.992195E-2,-1.1502401E-3,-2.2101323E-1,-7.697409E-2,-1.6925141E-1,-1.0164239E-1,-4.890334E-2,1.3256642E0,-8.6145854E-1,-5.316563E-3,-1.4768729E-1,-2.404066E-2,1.6999643E-1,-1.2281664E-1,6.0505096E-2,3.5842247E-2,1.9761588E-1,-1.6147196E-1,-1.1492491E-2,-5.377551E-2,1.106724E-1,-1.5406455E-2,9.994455E-2,-1.8901332E-1,-1.3869935E-2,1.552687E-1,1.954349E-2,3.5889573E-2,-1.8643574E-1],"split_indices":[7,7,0,6,9,0,9,6,7,5,6,0,3,2,0,0,8,6,0,0,0,2,10,6,4,2,3,6,4,0,5,0,0,0,0,10,0,6,7,3,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.7039725E2,1.659089E2,1.04488365E2,1.5729619E2,8.612704E0,1.916257E0,1.02572105E2,1.4477669E2,1.2519507E1,5.700196E0,2.9125087E0,1.5766813E1,8.680529E1,8.8986E1,5.5790684E1,1.473331E0,1.1046177E1,2.8755896E0,2.8246064E0,1.8000797E0,1.112429E0,4.251101E0,1.1515713E1,6.5899414E1,2.0905874E1,8.476758E1,4.2184253E0,7.1415415E0,4.864914E1,1.1357458E0,9.910431E0,1.5049837E0,1.370606E0,1.103935E0,3.147166E0,9.550579E0,1.9651337E0,5.0733063E1,1.5166354E1,1.7025957E1,3.8799171E0,7.946885E1,5.298726E0,1.0995852E0,3.1188402E0,5.9754663E0,1.1660755E0,4.5717472E1,2.9316676E0,5.933873E0,3.976558E0,4.7220397E0,4.828539E0,4.346301E1,7.2700534E0,4.449876E0,1.0716478E1,4.0326014E0,1.2993357E1,1.9364283E0,1.9434888E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"61","size_leaf_vector":"1"}},{"base_weights":[-2.102559E-3,-3.0390417E-2,3.4754176E-2,-1.5995195E-2,-3.184628E-1,3.230908E-1,1.3499543E-2,-8.146076E-2,5.794976E-2,8.919949E-2,-6.1062604E-1,-8.1702806E-2,4.7362608E-1,-1.4804144E-1,6.478242E-2,-1.6720264E-1,-5.8826152E-2,-7.648245E-2,1.3720368E-1,-6.818576E-2,-2.0800792E-1,6.834684E-1,1.1476189E-2,-4.2895475E-1,-1.6002273E-2,1.0817515E-1,-2.2295557E-1,-2.2351524E-1,-1.009471E-2,-3.002503E-1,1.01039216E-1,1.9180702E-1,-3.0437768E-1,2.4606042E-1,3.5966154E-2,-7.2289705E-2,8.140439E-2,-5.806191E-1,4.9901698E-2,-2.7326822E-1,1.6775724E-1,-1.0106143E-2,2.317757E-1,-6.807034E-1,6.5314E-2,-1.2991908E-1,3.737284E-2,1.5580463E-1,-1.2580942E-2,9.0717174E-2,-1.1502824E-1,1.0087722E-1,-5.4261554E-2,3.196216E-2,1.1389387E-1,4.4694193E-2,-1.3917264E-1,-4.699911E-2,-2.5699115E-1,4.3723986E-2,-1.859956E-1,8.444348E-2,-2.8290011E-2,-8.630933E-2,3.6329556E-2,1.0216913E-1,-1.9476924E-2,-2.7010116E-1,4.1476144E-3,1.6998674E-1,-2.9647017E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":82,"left_children":[1,3,5,7,9,11,13,15,17,-1,19,-1,21,23,25,-1,27,29,31,-1,-1,33,35,37,39,41,43,45,47,49,51,53,55,-1,-1,-1,-1,57,-1,59,61,63,65,67,69,-1,-1
,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.8113982E-1,6.2739223E-1,7.133984E-1,7.1301156E-1,1.4780102E0,8.457929E-1,9.218688E-1,8.076306E-1,7.4875396E-1,0E0,7.7950954E-3,0E0,6.466005E-1,9.8866266E-1,1.076439E0,0E0,6.044625E-1,1.0767325E0,1.1181077E0,0E0,0E0,2.5799155E-1,2.703523E-1,8.6789143E-1,9.5809174E-1,1.0828867E0,1.5449595E0,1.3096101E0,1.0229001E0,7.1268713E-1,1.084458E0,5.958406E-1,4.4687697E-1,0E0,0E0,0E0,0E0,6.8857837E-1,0E0,1.3137007E0,3.80466E-1,1.4452971E0,1.1900427E0,6.9607306E-1,7.0519257E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,10,10,12,12,13,13,14,14,16,16,17,17,18,18,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,37,37,39,39,40,40,41,41,42,42,43,43,44,44],"right_children":[2,4,6,8,10,12,14,16,18,-1,20,-1,22,24,26,-1,28,30,32,-1,-1,34,36,38,40,42,44,46,48,50,52,54,56,-1,-1,-1,-1,58,-1,60,62,64,66,68,70,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.9592093E-1,1.3372214E-1,2.5811973E-1,6.4971733E-1,-4.803529E-1,-9.3749535E-1,-7.0572996E-1,-5.411535E-1,-6.805919E-1,8.919949E-2,-1.4229501E0,-8.1702806E-2,2.521226E-1,5.691137E-1,2.1961388E-1,-1.6720264E-1,-8.459839E-1,-5.9413753E-2,1.5372332E0,-6.818576E-2,-2.0800792E-1,5.599659E-1,6.1836034E-1,-1.8775555E-1,5.963377E-1,-5.9036255E-1,4.5103574E-1,9.2987575E-2,-7.826177E-1,-6.5014505E-1,3.53506E-1,-1.9826542E-1,-1.2691727E0,2.4606042E-1,3.5966154E-2,-7.2289705E-2,8.140439E-2,-1.0719677E0,4.9901698E-2,5.9381795E-1,-5.976401E-1,-7.269058E-1,4.4686025E-1,6.237966E-1,-4.7897142E-1,-1.2991908E-1,3.737284E-2,1.5580463E-1,-1.2580942E-2,9.0717174E-2,-1.1502824E-1,1.0087722E-1,-5.4261554E-2,3.196216E-2,1.1389387E-1,4.4694193E-2,-1.3917264E-1,-4.699911E-2,-2.5699115E-1,4.3723986E-2,-1.859956E-1,8.444348E-2,-2.8290011E-2,-8.630933E-2,3.6329556E-2,1.0216913E-1,-1.9476924E-2,-2.7010116E-1,4.1476144E-3,1.6998674E-1,-2.9647017E-2],"split_indices":[8,8,8,10,5,0,5,2,9,0,7,0,5,8,0,0,7,2,9,0,0,2,5,3,9,0,0,6,7,6,2,3,5,0,0,0,0,5,0,10,3,1,6,6,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.6764005E2,1.5152148E2,1.16118576E2,1.4531546E2,6.2060122E0,6.997038E0,1.0912154E2,7.7023094E1,6.8292366E1,1.9912827E0,4.21473E0,1.2487032E0,5.748335E0,2.5839714E1,8.328182E1,2.4245558E0,7.459854E1,2.5341885E1,4.2950485E1,1.4133366E0,2.801393E0,3.623961E0,2.1243737E0,7.5430923E0,1.8296621E1,7.29105E1,1.0371322E1,1.6214518E1,5.8384026E1,1.0904858E1,1.4437027E1,3.8727325E1,4.2231565E0,2.5499644E0,1.0739965E0,1.1144946E0,1.0098791E0,6.031169E0,1.5119234E0,7.4205914E0,1.0876031E1,3.772616E1,3.5184345E1,3.4815578E0,6.889764E0,9.96909E0,6.245426E0,2.4435763E0,5.594045E1,1.0029211E0,9.901937E0,7.765199E0,6.671828E0,2.7714142E1,1.1013185E1,1.1164745E0,3.106682E0,2.9963892E0,3.0347795E0,3.6224375E0,3.7981539E0,7.5329576E0,3.3430731E0,1.17255335E1,2.6000624E1,2.5636559E1,9.547788E0,2.4199567E0,1.0616013E0,1.0945786E0,5.795185E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"71","size_leaf_vector":"1"}},{"base_weights":[-4.0848968E-3,2.1892628E-1,-8.250173E-3,3.866601E-1,-3.422443E-2,4.2938825E-2,-2.786199E-2,9.499941E-3,1.7818993E-1,1.3295794E-1,-6.0552645E-2,1.9968064E-1,-3.7827E-2,4.6940982E-1,4.0233262E-2,-4.24443E-1,6.454427E-2,
-2.0461826E-2,1.7569539E-1,-2.633972E-1,-1.7980937E-2,2.3626152E-1,6.9773644E-2,-2.2533461E-1,1.6238195E-1,-7.157087E-1,1.9735755E-1,1.8495308E-1,-1.15823455E-1,-2.6516467E-1,1.23581275E-1,4.2455655E-2,-4.532835E-1,-1.4456585E-1,6.417991E-3,-1.0941215E-1,1.1175836E-1,-9.241255E-3,-2.1805279E-1,-6.95931E-2,8.562444E-2,-2.9200053E-1,-7.3458087E-3,-9.815288E-2,1.721815E-1,2.3453422E-2,1.6585542E-1,9.958219E-2,-6.70641E-2,1.4547219E-2,-1.7896205E-1,-1.01267226E-1,8.013633E-2,3.6252692E-2,-1.675697E-1,-7.275767E-2,3.082986E-2,2.9445913E-2,-1.4943714E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":83,"left_children":[1,3,5,7,-1,9,11,-1,-1,13,15,17,19,21,23,25,27,29,-1,31,33,-1,35,37,39,41,43,45,47,49,-1,51,53,55,57,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.475687E-1,3.1709635E-1,2.62901E-1,2.37145E-1,0E0,6.844394E-1,4.3629044E-1,0E0,0E0,1.2022412E0,1.596133E0,6.99592E-1,8.099733E-1,1.0082829E0,1.0674616E0,1.7618543E0,5.9723234E-1,7.567339E-1,0E0,8.6970925E-1,5.2087045E-1,0E0,7.6776713E-1,9.9521166E-1,1.1505299E0,1.0998168E0,9.064515E-1,5.964666E-1,6.158254E-1,4.9834326E-1,0E0,6.471113E-1,6.1877394E-1,6.850829E-1,7.3787695E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,5,5,6,6,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,19,19,20,20,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,31,31,32,32,33,33,34,34],"right_children":[2,4,6,8,-1,10,12,-1,-1,14,16,18,20,22,24,26,28,30,-1,32,34,-1,36,38,40,42,44,46,48,50,-1,52,54,56,58,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.2736068E0,-1.6673584E-1,-6.004373E-1,-7.8681755E-1,-3.422443E-2,-1.0894437E0,-1.6897553E0,9.499941E-3,1.7818993E-1,8.034405E-1,-5.5560064E-1,-1.7397814E0,-1.3762592E0,-7.4779874E-1,-4.2012778E-1,-6.575297E-1,5.1640505E-1,2.8029475E-1,1.7569539E-1,-2.7857873E-1,-3.3469358E-1,2.3626152E-1,-8.498767E-1,-6.575297E-1,-8.2880706E-1,3.738951E-1,-1.9863154E-1,3.2722756E-1,-1.2182465E0,-5.2874237E-2,1.23581275E-1,-6.147578E-1,-1.0146359E0,1.0954309E0,-1.6608919E-1,-1.0941215E-1,1.1175836E-1,-9.241255E-3,-2.1805279E-1,-6.95931E-2,8.562444E-2,-2.9200053E-1,-7.3458087E-3,-9.815288E-2,1.721815E-1,2.3453422E-2,1.6585542E-1,9.958219E-2,-6.70641E-2,1.4547219E-2,-1.7896205E-1,-1.01267226E-1,8.013633E-2,3.6252692E-2,-1.675697E-1,-7.275767E-2,3.082986E-2,2.9445913E-2,-1.4943714E-2],"split_indices":[1,3,4,3,0,7,7,0,0,10,3,7,7,9,6,4,3,5,0,6,2,0,3,4,4,0,5,3,9,8,0,6,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.635728E2,3.887254E0,2.5968555E2,2.4779913E0,1.4092625E0,7.160339E1,1.8808215E2,1.3385669E0,1.1394244E0,3.8141968E1,3.3461426E1,7.0925083E0,1.8098964E2,7.362974E0,3.0778992E1,7.948213E0,2.5513214E1,5.119521E0,1.9729874E0,1.364368E1,1.6734596E2,3.5591173E0,3.803857E0,9.430672E0,2.134832E1,5.309905E0,2.6383078E0,1.5284352E1,1.0228861E1,3.5163264E0,1.6031945E0,5.523426E0,8.120254E0,2.6247149E1,1.4109882E2,1.4780644E0,2.3257928E0,7.5597396E0,1.870932E0,4.8659453E0,1.6482376E1,3.5708675E0,1.7390378E0,1.1573893E0,1.4809185E0,1.278598E1,2.4983723E0,1.5800028E0,8.648858E0,2.245023E0,1.2713034E0,1.8655051E0,3.6579208E0,1.235425E0,6.88
48286E0,1.881472E1,7.432428E0,5.3337574E1,8.7761246E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"59","size_leaf_vector":"1"}},{"base_weights":[-2.8590886E-3,2.1143068E-4,-2.5998738E-1,-2.7101578E-3,7.72802E-2,-1.1472943E-1,-9.3530776E-4,-1.779425E-1,2.6973835E-3,-3.2802913E-1,1.06864385E-1,1.1480352E-1,-1.9277568E-3,-4.8272344E-1,1.0166203E-2,5.1649986E-3,-2.4692544E-1,-6.051653E-2,-1.753089E-1,3.1793158E-4,1.0022846E-1,-1.15703E-1,4.0686972E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":84,"left_children":[1,3,5,7,-1,-1,-1,9,11,13,-1,-1,15,17,-1,19,21,-1,-1,-1,-1,-1,-1],"loss_changes":[2.0653737E-1,1.957954E-1,9.590179E-2,2.4450037E-1,0E0,0E0,0E0,7.3607534E-1,4.415042E-1,3.830868E-1,0E0,0E0,4.3357506E-1,1.5057087E-2,0E0,3.2744664E-1,4.2492086E-1,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,7,7,8,8,9,9,12,12,13,13,15,15,16,16],"right_children":[2,4,6,8,-1,-1,-1,10,12,14,-1,-1,16,18,-1,20,22,-1,-1,-1,-1,-1,-1],"split_conditions":[2.188833E0,1.8851101E0,2.857023E0,-1.2293531E0,7.72802E-2,-1.1472943E-1,-9.3530776E-4,6.948162E-1,-1.0228932E0,1.201199E0,1.06864385E-1,1.1480352E-1,8.8282496E-1,-7.447781E-1,1.0166203E-2,8.220804E-1,1.7299052E0,-6.051653E-2,-1.753089E-1,3.1793158E-4,1.0022846E-1,-1.15703E-1,4.0686972E-2],"split_indices":[1,1,1,2,0,0,0,7,2,9,0,0,1,3,0,1,2,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.605299E2,2.5844287E2,2.0870397E0,2.5652063E2,1.9222584E0,1.0823113E0,1.0047283E0,6.723935E0,2.4979668E2,5.5503116E0,1.1736239E0,2.0210063E0,2.4777568E2,3.654477E0,1.8958344E0,2.4175569E2,6.0199804E0,1.7635335E0,1.8909434E0,2.397665E2,1.9891968E0,4.411679E0,1.6083019E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"23","size_leaf_vector":"1"}},{"base_weights":[-2.198503E-3,1.9153124E-1,-5.7757716E-3,3.6233485E-1,-4.4848345E-2,-1.310461E-2,8.825231E-2,2.2480138E-2,1.3630015E-1,-8.8924654E-2,-9.770498E-3,1.6649866E-1,-1.3253377E-1,2.3593175E-1,-1.595389E-2,4.2677292E-1,6.192031E-2,1.0842872E-1,-1.13083914E-1,4.5390153E-1,-5.735041E-2,-1.967348E-1,-6.6688135E-3,1.8477055E-1,6.999261E-3,-1.5295953E-1,1.9455026E-1,-3.4288842E-2,9.337541E-2,1.7462556E-1,2.0707948E-2,1.919079E-2,-2.146499E-1,2.6375843E-2,-5.797143E-3,2.4629598E-2,-1.412625E-1,1.13522634E-1,-9.863343E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":85,"left_children":[1,3,5,7,-1,9,11,-1,-1,-1,13,15,17,19,21,23,25,27,-1,29,-1,31,33,-1,-1,35,37,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.8195982E-1,3.3146793E-1,1.7839904E-1,5.7546496E-2,0E0,2.2331022E-1,3.4558696E-1,0E0,0E0,0E0,3.6493215E-1,3.6198798E-1,3.6225578E-1,6.359463E-1,3.895001E-1,2.8477478E-1,3.5321134E-1,1.9478865E-1,0E0,1.6713166E-1,0E0,1.5792832E0,2.6801828E-1,0E0,0E0,4.0418902E-1,8.420076E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,5,5,6,6,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,19,19,21,21,22,22,25,25,26,26],"right_children":[2,4,6,8,-1,10,12,-1,-1,-1,14,16,18,20,22,24,26,28,-1,30,-1,32,34,-1,-1,36,38,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.2736068E0,-3.1497508E-1,1.9354022E0,-5.1479864E-1,-4.4848345E-2,-1.5542805E0,-3.720675E-1,2.2480138E-2,1.3630015E-1,-8.8924654E-2,-1.3526601E0,2.019251E0,-6.2121356E-1,-6.7486215E-1,-1.1510396E0,-3.2
037044E-1,-7.146222E-1,9.323718E-1,-1.13083914E-1,1.9577834E-1,-5.735041E-2,6.533265E-2,-8.8221234E-1,1.8477055E-1,6.999261E-3,-7.431684E-1,2.3453663E-1,-3.4288842E-2,9.337541E-2,1.7462556E-1,2.0707948E-2,1.919079E-2,-2.146499E-1,2.6375843E-2,-5.797143E-3,2.4629598E-2,-1.412625E-1,1.13522634E-1,-9.863343E-2],"split_indices":[1,4,10,4,0,9,4,0,0,0,9,10,5,8,9,0,4,9,0,5,0,0,9,0,0,4,6,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.599632E2,3.760658E0,2.562025E2,2.464048E0,1.2966102E0,2.3847563E2,1.7726894E1,1.0374868E0,1.426561E0,1.7513759E0,2.3672426E2,1.3269928E1,4.4569654E0,4.89909E0,2.3182516E2,2.9205015E0,1.0349427E1,2.5244856E0,1.9324801E0,3.202133E0,1.696957E0,1.0338854E1,2.2148631E2,1.6298358E0,1.2906656E0,3.8914428E0,6.457984E0,1.4112391E0,1.1132464E0,2.0177243E0,1.1844088E0,7.46435E0,2.8745036E0,2.5434168E1,1.9605214E2,2.6637194E0,1.2277232E0,4.9839244E0,1.4740597E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"39","size_leaf_vector":"1"}},{"base_weights":[-1.4963081E-3,-6.4944183E-3,1.3155079E-1,-2.419578E-3,-1.1167312E-1,4.490256E-1,-1.2252757E-1,-6.307563E-3,1.0067156E-1,2.0862292E-1,2.1907957E-2,1.696946E-1,-4.4804308E-1,-1.9440191E-3,-1.509368E-1,-3.838971E-2,1.539043E-1,-1.765577E-1,-2.3869766E-2,-8.463751E-3,2.64337E-1,-6.817954E-4,-1.0705859E-1,-8.772005E-2,1.5015787E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":86,"left_children":[1,3,5,7,-1,9,11,13,-1,-1,-1,15,17,19,-1,-1,-1,-1,-1,21,23,-1,-1,-1,-1],"loss_changes":[1.7350233E-1,3.7345058E-1,8.170617E-1,3.2987958E-1,0E0,3.581338E-1,6.49922E-1,5.3570974E-1,0E0,0E0,0E0,4.646552E-1,1.1512959E-1,4.3002936E-1,0E0,0E0,0E0,0E0,0E0,5.200803E-1,9.248189E-1,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,5,5,6,6,7,7,11,11,12,12,13,13,19,19,20,20],"right_children":[2,4,6,8,-1,10,12,14,-1,-1,-1,16,18,20,-1,-1,-1,-1,-1,22,24,-1,-1,-1,-1],"split_conditions":[1.6019539E0,1.5085721E0,-5.805907E-2,1.3951799E0,-1.1167312E-1,1.9877853E0,-8.747628E-1,1.3184733E0,1.0067156E-1,2.0862292E-1,2.1907957E-2,2.349285E0,-5.2874237E-2,1.2017461E0,-1.509368E-1,-3.838971E-2,1.539043E-1,-1.765577E-1,-2.3869766E-2,1.3846228E0,-5.750444E-1,-6.817954E-4,-1.0705859E-1,-8.772005E-2,1.5015787E-1],"split_indices":[7,7,4,7,0,3,5,7,0,0,0,2,8,7,0,0,0,0,0,6,1,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.5862726E2,2.5018005E2,8.447196E0,2.4841902E2,1.7610399E0,3.4140346E0,5.033161E0,2.4656409E2,1.854939E0,1.5493081E0,1.8647267E0,2.9044578E0,2.1287036E0,2.4541252E2,1.1515535E0,1.891653E0,1.0128049E0,1.1087916E0,1.019912E0,2.4049243E2,4.920098E0,2.372824E2,3.2100284E0,1.3946604E0,3.5254376E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"25","size_leaf_vector":"1"}},{"base_weights":[-1.3506856E-3,1.7021074E-3,-2.1326298E-1,-9.873897E-3,7.2291605E-2,-1.127822E-1,1.896319E-2,7.62333E-2,-2.1419933E-2,2.9872684E-2,3.7675473E-1,2.0034418E-1,-1.7613955E-1,-4.2920637E-1,-1.4071076E-2,2.1807571E-1,-8.656087E-2,1.8763866E-1,-5.0604146E-2,4.868395E-1,1.6081154E-2,-3.861141E-3,-1.7429669E-1,-1.3188456E-2,-1.7113449E-1,-3.1117974E-2,2.0084535E-1,-7.161722E-2,2.790276E-1,2.279705E-1,-1.8791859E-1,1.8569877E-2,1.6801138E-1,-1.2738658E-1,4.5175686E-2,-1.577286E-1,5.3797282E-2,7.1358387E-3,-3.1009717E-2,-1.8705541E-2,1.2618627E-1,1.4160101E-1,1.8288046E-2,-1.786991E-2,1.51003E-1,-1.128718E-1,1.3842134
E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":87,"left_children":[1,3,5,7,9,-1,-1,11,13,15,17,19,21,23,25,27,29,-1,-1,31,33,35,-1,-1,-1,37,39,-1,41,43,45,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.6775565E-1,2.0995916E-1,1.8938959E-1,2.2122328E-1,4.5622715E-1,0E0,0E0,8.601335E-1,5.7994175E-1,7.388675E-1,6.9427574E-1,9.487752E-1,6.4036334E-1,1.614263E-1,7.1374047E-1,4.2417008E-1,7.127855E-1,0E0,0E0,1.8489158E-1,7.7894384E-1,8.0843055E-1,0E0,0E0,0E0,7.1561E-1,8.5489017E-1,0E0,4.6742648E-1,4.7128E-1,7.4205446E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,19,19,20,20,21,21,25,25,26,26,28,28,29,29,30,30],"right_children":[2,4,6,8,10,-1,-1,12,14,16,18,20,22,24,26,28,30,-1,-1,32,34,36,-1,-1,-1,38,40,-1,42,44,46,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.9762269E0,1.066704E0,2.9675448E-1,-1.2694222E0,8.7150747E-1,-1.127822E-1,1.896319E-2,4.5761305E-1,-1.180954E0,1.3154992E0,9.282538E-1,2.0122655E-1,-9.874513E-1,1.3264844E-1,3.968685E-1,-1.1001399E0,-1.1689172E0,1.8763866E-1,-5.0604146E-2,-1.8356185E0,4.346811E-1,2.844956E-1,-1.7429669E-1,-1.3188456E-2,-1.7113449E-1,-5.1429987E-1,-5.9413753E-2,-7.161722E-2,-5.9413753E-2,-1.5472329E-1,4.6192405E-1,1.8569877E-2,1.6801138E-1,-1.2738658E-1,4.5175686E-2,-1.577286E-1,5.3797282E-2,7.1358387E-3,-3.1009717E-2,-1.8705541E-2,1.2618627E-1,1.4160101E-1,1.8288046E-2,-1.786991E-2,1.51003E-1,-1.128718E-1,1.3842134E-2],"split_indices":[1,8,0,6,6,0,0,1,6,8,5,7,5,10,1,5,0,0,0,6,7,4,0,0,0,1,2,0,2,6,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.5775156E2,2.55069E2,2.6825376E0,2.198722E2,3.519682E1,1.4623291E0,1.2202084E0,2.5334183E1,1.9453801E2,3.1856562E1,3.3402576E0,1.7120762E1,8.21342E0,2.4275804E0,1.9211043E2,1.1842079E1,2.0014482E1,2.1933932E0,1.1468644E0,6.058622E0,1.106214E1,6.4698124E0,1.7436075E0,1.0030253E0,1.424555E0,1.7878467E2,1.3325762E1,1.0507448E0,1.0791334E1,4.573358E0,1.5441125E1,1.1614833E0,4.8971386E0,2.0823615E0,8.979779E0,1.1950294E0,5.274783E0,1.0196667E2,7.681799E1,6.389803E0,6.9359593E0,5.1072555E0,5.684078E0,2.6206424E0,1.9527155E0,8.219983E0,7.221142E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"47","size_leaf_vector":"1"}},{"base_weights":[-1.5116753E-3,-1.2981688E-2,6.024603E-2,2.1615233E-1,-2.0640422E-2,-1.9875158E-1,1.1054169E-1,4.412333E-1,2.5132146E-2,-1.1731736E-2,-3.0738536E-1,7.6273434E-2,-4.862269E-1,-1.4147142E-1,1.8651238E-1,1.2825548E-2,1.9708382E-1,-1.372414E-1,4.332929E-1,-2.3201961E-2,1.6381875E-1,5.6083474E-2,-4.6365452E-1,-3.608502E-1,1.8172641E-1,-1.9042131E-1,-2.6787547E-2,-2.5804773E-1,1.1000874E-1,4.852828E-1,1.0950479E-1,1.9457652E-1,6.6660834E-3,-1.1761323E-2,-2.32293E-1,5.9903437E-1,-6.5242775E-2,-5.7715005E-1,-3.4086495E-3,-3.2033937E-3,-1.6126512E-1,6.841137E-2,-4.0160668E-1,2.079342E-1,-7.408191E-3,3.5995123E-1,-1.1163544E-2,-6.132205E-3,1.499633E-1,-1.254457E-1,7.6604515E-2,2.2551428E-1,1.0738235E-2,1.13179505E-1,-6.055698E-2,-2.1014391E-1,-3.3297595E-2,-1.8768784E-2,-1.8713E-1,-1.03327595E-1,1.6042358E-1,-1.4615072E-1,4.5044877E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":88,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,-1,-1,-1,31,33,35,-1,37,39,-1,-1,-1,41,-1,43,45,-1,-1,47,49,51,53,55,-1,-1,-1,-1,57,-1,-1,59,61,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.828428E-1,3.8629025E-1,5.4869545E-1,2.9523063E-1,5.365652E-1,5.852084E-1,6.869539E-1,2.4786162E-1,1.1662303E0,4.1885504E-1,5.758141E-1,1.2386959E0,1.5113306E-1,6.0174596E-1,5.6672716E-1,0E0,0E0,0E0,2.666754E-1,4.6220112E-1,1.311738E0,0E0,2.551607E-1,1.8242317E-1,0E0,0E0,0E0,6.184355E-1,0E0,5.7141197E-1,6.9040096E-1,0E0,0E0,8.332106E-1,1.0157785E0,3.6009717E-1,6.362895E-1,1.5541422E-1,0E0,0E0,0E0,0E0,4.2727703E-1,0E0,0E0,1.097115E0,1.3245428E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,18,18,19,19,20,20,22,22,23,23,27,27,29,29,30,30,33,33,34,34,35,35,36,36,37,37,42,42,45,45,46,46],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,-1,-1,-1,32,34,36,-1,38,40,-1,-1,-1,42,-1,44,46,-1,-1,48,50,52,54,56,-1,-1,-1,-1,58,-1,-1,60,62,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[7.8739315E-1,-1.0831622E0,-1.0552125E0,-8.1802267E-1,1.3256642E0,1.1254588E0,-7.4779874E-1,-7.573051E-2,-5.750444E-1,7.266022E-1,-3.222801E-2,1.0127703E0,-6.126634E-1,1.0417056E0,-5.461783E-1,1.2825548E-2,1.9708382E-1,-1.372414E-1,-3.445578E-1,4.3232614E-1,-6.9653356E-1,5.6083474E-2,9.7288567E-1,9.282538E-1,1.8172641E-1,-1.9042131E-1,-2.6787547E-2,9.282538E-1,1.1000874E-1,7.8226164E-2,-7.3706096E-1,1.9457652E-1,6.6660834E-3,3.902867E-1,6.414902E-1,2.8029475E-1,7.6864165E-1,1.9247261E0,-3.4086495E-3,-3.2033937E-3,-1.6126512E-1,6.841137E-2,-5.7189107E-1,2.079342E-1,-7.408191E-3,-9.793865E-1,-2.1014415E-1,-6.132205E-3,1.499633E-1,-1.254457E-1,7.6604515E-2,2.2551428E-1,1.0738235E-2,1.13179505E-1,-6.055698E-2,-2.1014391E-1,-3.3297595E-2,-1.8768784E-2,-1.8713E-1,-1.03327595E-1,1.6042358E-1,-1.4615072E-1,4.5044877E-2],"split_indices":[5,10,10,1,3,5,9,9,1,3,7,5,8,2,9,0,0,0,9,3,1,0,2,5,0,0,0,5,0,2,8,0,0,3,6,5,3,3,0,0,0,0,4,0,0,7,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.5601663E2,2.1658159E2,3.943504E1,6.124542E0,2.1045705E2,5.9327354E0,3.3502304E1,2.210275E0,3.9142673E0,2.0512506E2,5.331979E0,3.4074945E0,2.525241E0,7.560412E0,2.594189E1,1.197095E0,1.0131799E0,1.7382151E0,2.1760523E0,1.9335909E2,1.1765981E1,1.2335517E0,4.098427E0,2.040995E0,1.3664997E0,1.4017047E0,1.1235362E0,6.5500045E0,1.0104074E0,4.22975E0,2.1712141E1,1.0489035E0,1.1271487E0,1.8432954E2,9.029537E0,3.500287E0,8.265694E0,3.0556142E0,1.0428127E0,1.0401427E0,1.0008522E0,1.3592316E0,5.190773E0,2.7499518E0,1.4797986E0,6.414956E0,1.5297185E1,1.821988E2,2.1307418E0,6.6409216E0,2.388616E0,2.4905412E0,1.009746E0,1.5343181E0,6.731375E0,2.0188334E0,1.0367807E0,2.562178E0,2.628595E0,1.0824571E0,5.332499E0,3.3606122E0,1.1936572E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"63","size_leaf_vector":"1"}},{"base_weights":[2.2848503E-4,2.303518E-2,-3.544679E-2,6.173421E-3,2.9137114E-1,-4.10431E-1,-1.3456953E-2,1.3029459E-2,-1.4688964E-1,-2.4061775E-1,5.047208E-1,-2.0835762E-1,2.5516236E-2,2.012701E-1,-4.2155966E-2,-2.9519805E-1,2.4003467E-2,8.95289E-2,-1.8785061E-1,1.4523398E-3,6.5315247E-1,9.087581E-2,-7.707732E-2,3.433649E-1,-1.1995561E-1,-2.027306
4E-1,-1.2270861E-2,-1.7170912E-1,3.0479077E-2,1.5446799E-2,3.954615E-1,4.2023096E-2,2.3111014E-1,7.5405234E-1,2.615455E-2,1.530841E-3,-4.9791393E-1,-4.730552E-2,2.0844817E-1,1.6438974E-2,-1.7076898E-2,1.71298E-1,3.53543E-3,2.953686E-1,1.296256E-2,8.265086E-2,-1.1915554E-1,8.5628E-2,-1.3307239E-1,-2.9364359E-2,-1.8987732E-1,-5.5490844E-3,-1.5854764E-1,1.4206584E-1,-9.899296E-3],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":89,"left_children":[1,3,5,7,9,11,13,15,-1,17,19,-1,21,23,25,27,29,-1,-1,-1,31,-1,-1,33,-1,35,37,-1,-1,39,41,-1,-1,43,45,47,49,51,53,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.0791271E-1,7.034812E-1,8.16223E-1,5.083893E-1,1.1649868E0,6.972133E-1,5.94962E-1,5.0310016E-1,0E0,8.7457246E-1,5.077096E-1,0E0,3.230028E-1,1.1104684E0,4.0433526E-1,6.20872E-1,4.4689918E-1,0E0,0E0,0E0,2.1091533E-1,0E0,0E0,1.250847E0,0E0,7.989311E-1,5.707862E-1,0E0,0E0,4.0281737E-1,2.0666432E-1,0E0,0E0,6.4800453E-1,7.996432E-1,1.2412349E0,2.3061502E-1,8.6932456E-1,6.634996E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,9,9,10,10,12,12,13,13,14,14,15,15,16,16,20,20,23,23,25,25,26,26,29,29,30,30,33,33,34,34,35,35,36,36,37,37,38,38],"right_children":[2,4,6,8,10,12,14,16,-1,18,20,-1,22,24,26,28,30,-1,-1,-1,32,-1,-1,34,-1,36,38,-1,-1,40,42,-1,-1,44,46,48,50,52,54,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-3.1497508E-1,-3.435213E-1,-9.3749535E-1,1.3492355E0,4.5194067E-3,1.0410126E0,-8.28857E-1,-1.1001399E0,-1.4688964E-1,-3.3165962E-1,-7.572781E-1,-2.0835762E-1,-7.0273864E-1,2.9675448E-1,-2.293364E-1,-1.9826542E-1,1.8060604E0,8.95289E-2,-1.8785061E-1,1.4523398E-3,2.521226E-1,9.087581E-2,-7.707732E-2,-4.2400864E-1,-1.1995561E-1,1.2588996E-1,1.0127703E0,-1.7170912E-1,3.0479077E-2,-7.826177E-1,2.0748878E0,4.2023096E-2,2.3111014E-1,-7.573051E-2,-8.6605296E-2,-3.5239425E-1,9.406206E-3,8.7190956E-1,7.830393E-1,1.6438974E-2,-1.7076898E-2,1.71298E-1,3.53543E-3,2.953686E-1,1.296256E-2,8.265086E-2,-1.1915554E-1,8.5628E-2,-1.3307239E-1,-2.9364359E-2,-1.8987732E-1,-5.5490844E-3,-1.5854764E-1,1.4206584E-1,-9.899296E-3],"split_indices":[4,4,0,6,6,10,3,5,0,6,1,0,3,0,4,3,9,0,0,0,5,0,0,5,0,9,5,0,0,7,9,0,0,9,4,7,2,5,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.5353203E2,1.5487619E2,9.865584E1,1.4668196E2,8.19423E0,4.4864E0,9.416943E1,1.4564174E2,1.0402248E0,2.3089755E0,5.8852544E0,2.3571632E0,2.129237E0,1.0393294E1,8.3776146E1,4.116677E0,1.4152507E2,1.0570524E0,1.251923E0,1.5838544E0,4.3014E0,1.0376186E0,1.0916183E0,8.753025E0,1.6402693E0,1.22378235E1,7.153832E1,2.1631944E0,1.9534826E0,1.3935652E2,2.1685424E0,1.2081397E0,3.0932603E0,3.2143362E0,5.5386887E0,7.8207917E0,4.417032E0,6.2416626E1,9.121695E0,9.042978E1,4.8926746E1,1.1528842E0,1.0156581E0,2.136469E0,1.0778672E0,3.705408E0,1.8332806E0,4.994169E0,2.8266225E0,1.549851E0,2.8671808E0,5.987062E1,2.5460055E0,3.8896189E0,5.2320757E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"55","size_leaf_vector":"1"}},{"base_weights":[-1.4176216E-3,1.4342225E-1,-5.6957593E-3,1.4454824E-1,-7.7276314E-3,-3.999371E-2,2.047415E-2,-3.212509E-1,2.9340994E-1,-9.838173E-2,7.0140034E-2,9.911749E-2,-2.0865545E-2,9.625211E-3,-1.4846523E-1,1.3884649E-1,-5.41962
2E-3,-1.359223E-1,1.9433612E-1,-4.428872E-1,1.2567687E-1,-1.4794832E-1,1.7439997E-1,-3.273904E-1,2.033022E-2,-3.1109532E-2,-3.512689E-1,3.7520733E-1,-1.6527486E-1,-3.096288E-3,-1.8845056E-1,-3.0198318E-1,2.2001521E-1,1.6614243E-1,-3.9748684E-1,-1.3746333E-1,2.1864732E-1,1.128234E-1,-6.796246E-1,-3.4822416E-2,1.4328556E-1,3.7453875E-2,-6.567639E-2,1.1620026E-1,-1.4517124E-1,1.8020502E-1,-5.047414E-2,1.6080163E-2,-1.5773967E-1,1.15890145E-1,-1.1694972E-2,-8.403734E-2,1.13667436E-1,1.1328639E-1,-1.7896965E-1,-3.5845276E-2,9.451898E-2,-8.767347E-2,9.8630674E-2,-2.4366795E-1,-3.8981603E-3,2.7323814E-2,-3.60266E-2,1.03478424E-1,-1.0773449E-3],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":90,"left_children":[1,3,5,-1,7,9,11,13,15,17,19,21,23,-1,-1,-1,-1,25,27,29,31,33,35,37,39,41,43,45,-1,-1,-1,47,49,51,53,-1,55,57,59,61,63,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.5705982E-1,3.7569863E-1,2.2151162E-1,0E0,6.617943E-1,6.9336665E-1,4.5762777E-1,2.2584274E-1,1.9773299E-1,7.976245E-1,1.1261088E0,9.2702365E-1,1.1768695E0,0E0,0E0,0E0,0E0,1.4063635E0,1.337516E0,3.0535614E-1,1.4655712E0,9.9536777E-1,1.1525619E0,1.7881533E0,5.6535655E-1,1.2965628E0,2.1917534E0,1.001162E0,0E0,0E0,0E0,5.605231E-1,1.263609E0,6.7149836E-1,1.2903503E0,0E0,1.2052811E0,6.0024965E-1,5.440366E-1,6.319694E-1,7.648678E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,31,31,32,32,33,33,34,34,36,36,37,37,38,38,39,39,40,40],"right_children":[2,4,6,-1,8,10,12,14,16,18,20,22,24,-1,-1,-1,-1,26,28,30,32,34,36,38,40,42,44,46,-1,-1,-1,48,50,52,54,-1,56,58,60,62,64,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.2432345E0,-1.312893E0,-6.6069925E-1,1.4454824E-1,6.9351125E-1,1.3943407E-1,-7.6594234E-1,-1.2823372E-1,-1.3520625E-1,2.522303E-1,-7.717146E-1,-4.803529E-1,-5.6583846E-1,9.625211E-3,-1.4846523E-1,1.3884649E-1,-5.419622E-3,-2.9627237E-1,5.599659E-1,-1.7241693E-1,-6.7486215E-1,-2.8234428E-1,-9.395118E-1,-3.2037044E-1,5.09969E-1,-7.657978E-1,-7.717146E-1,4.9994114E-1,-1.6527486E-1,-3.096288E-3,-1.8845056E-1,-4.5770618E-1,2.3453663E-1,-3.3206618E-1,-1.016626E0,-1.3746333E-1,-1.4863164E0,-4.5355532E-1,8.7190956E-1,-6.9653356E-1,-8.523691E-3,3.7453875E-2,-6.567639E-2,1.1620026E-1,-1.4517124E-1,1.8020502E-1,-5.047414E-2,1.6080163E-2,-1.5773967E-1,1.15890145E-1,-1.1694972E-2,-8.403734E-2,1.13667436E-1,1.1328639E-1,-1.7896965E-1,-3.5845276E-2,9.451898E-2,-8.767347E-2,9.8630674E-2,-2.4366795E-1,-3.8981603E-3,2.7323814E-2,-3.60266E-2,1.03478424E-1,-1.0773449E-3],"split_indices":[1,7,3,0,8,5,7,2,3,6,4,5,7,0,0,0,0,6,2,6,8,3,1,0,10,3,4,6,0,0,0,4,6,1,9,0,7,1,5,1,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.5114912E2,6.2722573E0,2.4487688E2,1.2610906E0,5.0111666E0,1.0575175E2,1.3912512E2,2.422372E0,2.5887945E0,6.9181496E1,3.657025E1,4.745348E1,9.1671646E1,1.0667691E0,1.3556029E0,1.3620652E0,1.2267294E0,6.1792385E1,7.3891144E0,2.8908713E0,3.367938E1,1.0857044E1,3.6596436E1,1.0037624E1,8.163402E1,4.233278E1,1.9459604E1
,6.345589E0,1.0435249E0,1.1835067E0,1.7073644E0,5.688929E0,2.799045E1,4.954758E0,5.9022865E0,1.7807734E0,3.4815662E1,4.7637267E0,5.2738976E0,5.685019E1,2.4783834E1,2.3310984E1,1.9021797E1,2.670142E0,1.6789463E1,4.410424E0,1.935165E0,2.4913502E0,3.1975791E0,1.6746893E1,1.1243557E1,1.4972998E0,3.4574583E0,1.0228804E0,4.879406E0,7.6716967E0,2.7143967E1,1.533631E0,3.2300954E0,4.216728E0,1.0571697E0,2.292766E1,3.3922527E1,9.876405E0,1.490743E1],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"65","size_leaf_vector":"1"}},{"base_weights":[-3.5601244E-3,1.7636195E-1,-7.779653E-3,-4.9459815E-2,3.329496E-1,-1.8580726E-1,-1.6345643E-3,2.3613658E-3,1.4635934E-1,-4.2592883E-2,-1.5673172E-1,-7.2285105E-3,2.6338837E-1,-1.1129435E-1,8.698698E-2,-8.7692146E-4,-1.3556987E-1,1.4728905E-1,-1.2706601E-2,1.0799618E-1,-1.5658188E-1,-8.797856E-3,3.4769732E-1,-1.3799228E-1,6.81542E-2,-1.0295082E-3,-7.6357566E-2,1.97707E-1,-1.9582449E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":91,"left_children":[1,3,5,-1,7,9,11,-1,-1,13,-1,15,17,-1,19,21,-1,-1,-1,-1,23,25,27,-1,-1,-1,-1,-1,-1],"loss_changes":[1.8972349E-1,3.6091244E-1,2.6594105E-1,0E0,2.105374E-1,3.6880064E-1,3.5216185E-1,0E0,0E0,3.2074314E-1,0E0,6.5583825E-1,3.6246017E-1,0E0,4.3346554E-1,6.3831025E-1,0E0,0E0,0E0,0E0,5.345921E-1,2.970547E-1,7.034435E-1,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,5,5,6,6,9,9,11,11,12,12,14,14,15,15,20,20,21,21,22,22],"right_children":[2,4,6,-1,8,10,12,-1,-1,14,-1,16,18,-1,20,22,-1,-1,-1,-1,24,26,28,-1,-1,-1,-1,-1,-1],"split_conditions":[-1.8179249E0,2.603036E-1,-1.3255169E0,-4.9459815E-2,8.034405E-1,3.825173E-1,3.382379E0,2.3613658E-3,1.4635934E-1,-7.447781E-1,-1.5673172E-1,2.5338323E0,3.7680821E0,-1.1129435E-1,1.5389027E-1,1.7624261E0,-1.3556987E-1,1.4728905E-1,-1.2706601E-2,1.0799618E-1,9.571637E-1,1.551015E0,7.664258E-1,-1.3799228E-1,6.81542E-2,-1.0295082E-3,-7.6357566E-2,1.97707E-1,-1.9582449E-2],"split_indices":[6,9,5,0,10,8,0,0,0,3,0,0,0,0,1,0,0,0,0,0,10,1,2,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.4709648E2,4.727277E0,2.423692E2,1.470333E0,3.2569437E0,7.1113596E0,2.3525784E2,1.3903011E0,1.8666426E0,5.7792153E0,1.3321438E0,2.3134743E2,3.9104185E0,1.1080873E0,4.6711283E0,2.290773E2,2.2701209E0,1.8944881E0,2.0159304E0,1.9771273E0,2.694001E0,2.2494057E2,4.136746E0,1.3936377E0,1.3003633E0,2.2112561E2,3.8149471E0,2.0189335E0,2.1178124E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"29","size_leaf_vector":"1"}},{"base_weights":[-2.7166947E-3,-5.361324E-3,7.659285E-2,-3.122472E-4,-3.6277562E-1,-5.575511E-3,2.4688858E-1,-2.1240315E-3,-1.8029468E-1,1.21269E-1,-1.2671055E-2,-4.3603823E-2,3.8832304E-1,-7.348275E-2,2.0501588E-1,1.8248323E-3,-1.5365696E-1,1.5640235E-1,2.7420426E-2,8.859272E-2,-9.3644805E-2,4.079263E-3,-6.660955E-2,-8.973698E-2,6.624462E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":92,"left_children":[1,3,-1,5,7,9,11,-1,-1,13,15,-1,17,-1,19,21,23,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.7011583E-1,4.4339025E-1,0E0,3.1798023E-1,2.8573573E-1,2.168631E-1,3.390946E-1,0E0,0E0,4.4181973E-1,4.651743E-1,0E0,1.110093E-1,0E0,6.028767E-1,5.4921913E-1,1.2219528E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,3,3,4,4,5,5,6,6,9,9,10,10,12,12,14,14,15,15,16,16],"right_ch
ildren":[2,4,-1,6,8,10,12,-1,-1,14,16,-1,18,-1,20,22,24,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[3.2846105E0,2.5453355E0,7.659285E-2,2.1420946E0,-5.3458095E-1,-1.6586821E0,-4.5355532E-1,-2.1240315E-3,-1.8029468E-1,2.1961388E-1,2.844956E-1,-4.3603823E-2,3.3612394E-1,-7.348275E-2,1.2684474E0,8.367388E-1,1.1683954E0,1.5640235E-1,2.7420426E-2,8.859272E-2,-9.3644805E-2,4.079263E-3,-6.660955E-2,-8.973698E-2,6.624462E-2],"split_indices":[9,9,0,9,3,6,1,0,0,0,4,0,1,0,7,0,7,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.4629057E2,2.4476112E2,1.5294478E0,2.4233856E2,2.4225652E0,2.3824345E2,4.0951033E0,1.384669E0,1.0378963E0,1.1768649E1,2.2647481E2,1.0779659E0,3.0171373E0,1.8321517E0,9.936498E0,2.0625513E2,2.021968E1,1.561693E0,1.4554445E0,8.824747E0,1.1117502E0,1.9684236E2,9.412762E0,1.4707622E1,5.5120597E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"25","size_leaf_vector":"1"}},{"base_weights":[-2.5718822E-3,-3.1139093E-2,2.1480747E-2,1.2112687E-1,-4.0167924E-2,-1.8088256E-1,3.7692662E-2,-2.2450702E-2,-2.708684E-1,1.7858586E-1,-3.7061647E-1,2.634357E-1,1.4735463E-2,-5.736563E-2,3.3101466E-1,8.9187786E-2,-4.284991E-1,1.2329868E-1,-8.2223415E-2,-5.829354E-1,3.565702E-2,3.8456225E-1,-1.8707293E-1,2.4964693E-1,-3.6481302E-3,-8.2217105E-2,2.3613839E-1,-9.0829924E-2,5.028282E-1,5.021449E-3,-6.3111746E-1,-4.272269E-2,-2.0906086E-1,5.149658E-1,-2.2154067E-2,-9.424341E-2,9.455813E-3,-1.0619169E-1,1.7359486E-1,1.5685026E-2,-4.1405368E-1,-2.9344762E-2,1.0840784E-1,1.6887255E-1,-2.4251265E-2,-1.1262616E-1,5.6123022E-2,1.8836597E-1,-2.818576E-2,-2.2160718E-1,-6.9635995E-2,1.9117926E-1,2.9366145E-2,9.224194E-2,-1.1515674E-1,-1.3888213E-2,4.9590807E-2,-2.0113806E-1,3.3289064E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":93,"left_children":[1,3,5,-1,7,9,11,13,15,17,19,21,23,25,27,-1,29,-1,-1,31,-1,33,35,37,39,41,43,45,47,-1,49,-1,-1,51,-1,-1,-1,53,-1,55,57,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.6993757E-1,4.5997703E-1,4.475231E-1,0E0,4.5017007E-1,7.574978E-1,6.449413E-1,1.3117794E0,8.322888E-1,5.5203134E-1,7.925468E-1,6.993432E-1,4.9624133E-1,7.2101736E-1,7.3892653E-1,0E0,6.0334945E-1,0E0,0E0,1.528858E-1,0E0,5.992559E-1,9.626011E-2,1.037712E0,8.5833716E-1,6.540796E-1,8.4999204E-1,3.5616037E-1,5.7766545E-1,0E0,2.3471236E-2,0E0,0E0,3.1684756E-1,0E0,0E0,0E0,6.7521787E-1,0E0,9.6736723E-1,7.295059E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,16,16,19,19,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,30,30,33,33,37,37,39,39,40,40],"right_children":[2,4,6,-1,8,10,12,14,16,18,20,22,24,26,28,-1,30,-1,-1,32,-1,34,36,38,40,42,44,46,48,-1,50,-1,-1,52,-1,-1,-1,54,-1,56,58,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-2.432298E-1,-1.7322416E0,-1.1510396E0,1.2112687E-1,9.0406567E-1,-2.9598737E-2,-9.494192E-1,4.9114594E-1,-8.8221234E-1,-5.4334486E-1,4.743656E-1,-5.2874237E-2,-1.0610008E0,2.7579596E-2,-6.004373E-1,8.9187786E-2,-8.2178444E-1,1.2329868E-1,-8.2223415E-2,1.9577834E-1,3.565702E-2,3.53506E-1,-6.9653356E-1,-7.0273864E-1,1.379008E0,1.6716468E0,1.9885693E-1,-7.572781E-1,6.414902E-1,5.021449E-3,1.0410126E0,-4.272269E-2,-2.0906086E-1,-3.3206618E-1,-2.2154067E-2,-9.424341E-2,9.455813E-3,-6.80
5919E-1,1.7359486E-1,2.521226E-1,4.6192405E-1,-2.9344762E-2,1.0840784E-1,1.6887255E-1,-2.4251265E-2,-1.1262616E-1,5.6123022E-2,1.8836597E-1,-2.818576E-2,-2.2160718E-1,-6.9635995E-2,1.9117926E-1,2.9366145E-2,9.224194E-2,-1.1515674E-1,-1.3888213E-2,4.9590807E-2,-2.0113806E-1,3.3289064E-2],"split_indices":[0,8,9,0,2,5,9,2,9,4,3,8,1,4,4,0,0,0,0,5,0,2,1,3,5,9,4,1,6,0,10,0,0,1,0,0,0,9,0,5,9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.4534642E2,1.1201364E2,1.3333278E2,1.3890326E0,1.1062461E2,9.136027E0,1.2419675E2,1.0375388E2,6.8707256E0,3.176538E0,5.9594893E0,1.0497501E1,1.1369925E2,9.518893E1,8.564955E0,1.2997881E0,5.5709376E0,2.1597273E0,1.0168105E0,4.0233393E0,1.93615E0,8.388499E0,2.109002E0,7.3390193E0,1.0636023E2,8.8421974E1,6.7669578E0,2.61524E0,5.9497147E0,2.02924E0,3.5416975E0,1.28907E0,2.7342694E0,6.434658E0,1.9538411E0,1.0572673E0,1.0517348E0,3.851064E0,3.4879553E0,1.0249379E2,3.8664389E0,8.6171165E1,2.2508032E0,2.9499679E0,3.8169897E0,1.1187234E0,1.4965167E0,4.875836E0,1.0738784E0,2.1190243E0,1.4226731E0,4.567441E0,1.8672172E0,1.503616E0,2.347448E0,7.29605E1,2.9533281E1,2.411629E0,1.4548098E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"59","size_leaf_vector":"1"}},{"base_weights":[-1.236139E-3,-2.757489E-2,3.3093035E-2,-9.715703E-3,-2.125307E-1,1.0120114E-1,-5.4143656E-2,-4.8284426E-2,6.661339E-2,2.0556238E-1,-5.5473655E-1,5.321534E-3,2.641933E-1,-4.3592507E-1,1.3652884E-2,-2.8321024E-3,-1.9575366E-1,-7.379977E-2,2.3411065E-1,1.4792576E-1,-7.2816886E-2,2.4610572E-2,-7.449898E-1,9.624686E-2,-2.631927E-1,3.321797E-1,-9.7104914E-2,2.118459E-2,-5.731716E-1,8.465059E-2,-2.7543044E-1,8.566184E-2,-1.830305E-1,-3.6179432E-1,8.63915E-2,-2.6407805E-1,2.3713133E-1,6.4371395E-1,3.1829562E-2,-8.054896E-2,7.733309E-2,-4.90499E-2,-2.534317E-1,-1.2659955E-1,1.541823E-1,1.8197669E-1,-4.385334E-1,1.5150397E-2,4.434574E-1,-6.939782E-1,-8.314296E-3,-2.1238522E-1,1.916293E-1,-4.0446216E-1,1.0056909E-1,-1.3750249E-1,4.5439318E-2,3.247197E-2,-1.00564905E-1,-2.3002064E-1,-6.680376E-2,7.690155E-2,-7.878056E-2,8.040831E-2,-1.3072447E-1,1.4061353E-1,-6.181485E-2,6.5727797E-3,2.3110688E-1,1.9892417E-1,-2.6898786E-2,-1.6494958E-2,1.0707589E-1,8.435697E-2,-1.6965355E-1,-1.20387636E-1,9.6185826E-2,1.955403E-1,-9.6149143E-4,-7.402509E-2,-2.4327321E-1,5.257024E-2,-1.0032522E-1,-7.287084E-2,9.1924876E-2,-1.64787E-1,5.120893E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":94,"left_children":[1,3,5,7,9,11,13,15,17,19,21,23,25,27,29,31,33,35,37,-1,39,-1,41,43,45,47,-1,-1,49,51,53,55,57,59,61,63,65,67,69,-1,-1,-1,-1,-1,71,-1,73,75,77,79,-1,81,83,85,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.214863E-1,4.5569947E-1,6.3743883E-1,3.7835503E-1,1.8531394E0,9.374813E-1,1.2298368E0,5.707487E-1,1.033178E0,5.3220904E-1,9.03656E-1,9.7378606E-1,9.960439E-1,5.3830564E-1,8.6734587E-1,1.0708079E0,9.7131205E-1,1.4977889E0,1.6314363E0,0E0,3.2938126E-1,0E0,1.7833233E-1,9.5838565E-1,1.8055518E0,7.136693E-1,0E0,0E0,3.5926485E-1,1.1141124E0,7.930092E-1,1.682285E0,1.0164001E0,5.517415E-1,5.5610645E-1,1.534644E0,1.0958055E0,4.980533E-1,1.1637017E0,0E0,0E0,0E0,0E0,0E0,1.1855056E0
,0E0,9.798393E-1,9.3526363E-1,1.4053707E0,4.365039E-2,0E0,5.041352E-1,1.3394091E0,7.052803E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,20,20,22,22,23,23,24,24,25,25,28,28,29,29,30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,38,38,44,44,46,46,47,47,48,48,49,49,51,51,52,52,53,53],"right_children":[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,-1,40,-1,42,44,46,48,-1,-1,50,52,54,56,58,60,62,64,66,68,70,-1,-1,-1,-1,-1,72,-1,74,76,78,80,-1,82,84,86,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.9592093E-1,7.1523346E-2,7.8226164E-2,-2.4094936E-1,-3.3165962E-1,1.9577834E-1,1.4704613E-1,3.2751042E-1,2.8468606E-1,1.4704613E-1,-9.395118E-1,9.2987575E-2,9.995786E-1,-8.1802267E-1,3.240115E-2,3.53506E-1,4.9114594E-1,-5.037827E-2,6.2878585E-1,1.4792576E-1,-5.976401E-1,2.4610572E-2,-2.6587364E-1,-9.6988404E-1,4.447161E-1,-9.3749535E-1,-9.7104914E-2,2.118459E-2,8.11642E-2,-5.9036255E-1,5.1640505E-1,-1.1510396E0,-8.1500554E-1,-6.147578E-1,-2.3947062E-1,-1.0838329E0,-4.2915997E-1,-7.717146E-1,-8.0783725E-1,-8.054896E-2,7.733309E-2,-4.90499E-2,-2.534317E-1,-1.2659955E-1,-3.3469358E-1,1.8197669E-1,-6.0997343E-1,8.179088E-1,5.1763475E-1,-5.7189107E-1,-8.314296E-3,-1.0002563E0,-1.6082309E-1,1.8515531E0,1.0056909E-1,-1.3750249E-1,4.5439318E-2,3.247197E-2,-1.00564905E-1,-2.3002064E-1,-6.680376E-2,7.690155E-2,-7.878056E-2,8.040831E-2,-1.3072447E-1,1.4061353E-1,-6.181485E-2,6.5727797E-3,2.3110688E-1,1.9892417E-1,-2.6898786E-2,-1.6494958E-2,1.0707589E-1,8.435697E-2,-1.6965355E-1,-1.20387636E-1,9.6185826E-2,1.955403E-1,-9.6149143E-4,-7.402509E-2,-2.4327321E-1,5.257024E-2,-1.0032522E-1,-7.287084E-2,9.1924876E-2,-1.64787E-1,5.120893E-2],"split_indices":[8,8,2,1,6,5,2,9,2,2,1,6,9,1,1,2,2,0,2,0,3,0,2,1,8,0,0,0,7,0,3,9,9,6,8,9,4,4,3,0,0,0,0,0,2,0,2,8,6,4,0,1,10,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.429469E2,1.3758376E2,1.0536315E2,1.2642846E2,1.1155301E1,5.9078697E1,4.628445E1,8.423323E1,4.2195225E1,5.2006555E0,5.9546456E0,3.7847614E1,2.1231085E1,6.160883E0,4.0123566E1,6.516696E1,1.9066269E1,2.325768E1,1.8937544E1,2.1789045E0,3.0217507E0,1.5006908E0,4.453955E0,2.875279E1,9.094822E0,1.9420145E1,1.8109405E0,1.4169148E0,4.7439685E0,3.2780087E1,7.343482E0,4.4056034E1,2.111093E1,1.1825021E1,7.241249E0,1.4521654E1,8.736027E0,5.5390763E0,1.3398468E1,2.016194E0,1.0055566E0,1.038804E0,3.4151511E0,2.259261E0,2.649353E1,1.1132132E0,7.9816093E0,5.3406944E0,1.407945E1,3.660879E0,1.0830895E0,8.418917E0,2.4361168E1,6.34125E0,1.002232E0,4.1102576E0,3.9945774E1,7.3439727E0,1.3766956E1,1.8700625E0,9.954958E0,5.04837E0,2.1928794E0,3.4053128E0,1.1116342E1,5.699955E0,3.036072E0,1.1357286E0,4.4033475E0,1.4430166E0,1.1955452E1,1.33987465E1,1.3094784E1,1.0148563E0,6.966753E0,2.1271214E0,3.2135732E0,9.287889E0,4.791561E0,1.4034281E0,2.2574508E0,1.9114566E0,6.5074606E0,4.857338E0,1.950383E1,5.1016145E0,1.2396356E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"87","size_leaf_vector":"1"}},{"base_weights":[-3.2691455E-3,-4.3049492E-2,2.4511565E-2,-6.421543E-2,3.2252604E-1,-2.4245053E-3,1.3356967E-1,-2.1815056E-2,
-2.1709424E-1,5.8190703E-1,-7.8352734E-2,-5.50154E-2,7.36557E-2,2.9858762E-1,-1.3149604E-1,-1.0443654E-1,1.0308578E-1,1.5130208E-1,-4.027974E-1,2.1463741E-1,5.4580107E-2,-3.5537392E-1,-1.456667E-2,2.884875E-1,-2.8002825E-2,5.6542647E-1,1.4464307E-1,-4.12245E-1,2.3486976E-1,-1.85316E-1,1.0967092E-1,5.570471E-1,4.117346E-2,-1.2468221E-1,3.8113388E-1,6.1655217E-3,-5.950793E-1,-6.1965376E-1,2.1182463E-1,-2.1791682E-1,4.581054E-2,6.388076E-1,1.1277912E-1,-1.4279127E-1,2.269965E-1,2.279614E-2,6.7692035E-1,2.7002555E-1,-1.432595E-1,-6.905246E-2,-5.981732E-1,-2.6900547E-2,3.6163655E-1,-9.721004E-3,-9.273901E-2,7.624355E-2,-9.1545664E-2,2.015795E-1,4.7433354E-2,-1.1361251E-1,2.9376216E-2,1.828264E-1,-3.8935177E-2,-1.244139E-1,9.885284E-2,-2.6361346E-1,-6.275968E-2,-2.3018873E-1,-1.1906151E-2,1.3628325E-1,-3.289397E-2,-1.256945E-1,5.9003044E-2,4.452857E-2,-3.1645972E-2,-1.2589061E-2,2.5138047E-1,-3.7852623E-2,1.6019873E-1,-1.2998782E-1,-1.4019746E-2,2.3668118E-1,7.6486412E-3,2.4303696E-1,3.7561364E-2,-3.7899546E-4,1.4171124E-1,6.658252E-2,-1.1155833E-1,-6.461557E-2,-2.1281509E-1,4.701023E-3,1.5695593E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":95,"left_children":[1,3,5,7,9,11,13,15,17,19,-1,21,23,25,27,29,31,33,35,-1,-1,37,39,41,43,45,47,49,51,53,55,57,59,-1,61,63,65,67,69,71,73,75,77,79,81,-1,83,85,-1,87,89,-1,91,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.6521412E-1,7.8404576E-1,4.1564062E-1,6.0513854E-1,9.94541E-1,4.616176E-1,1.2649014E0,7.823424E-1,1.4508659E0,9.21942E-2,0E0,8.232758E-1,1.0382215E0,6.4017534E-1,1.2740642E0,8.082018E-1,8.2786053E-1,1.1402967E0,1.0873394E0,0E0,0E0,1.3380108E0,7.606411E-1,8.660035E-1,9.962903E-1,2.806821E-1,1.1032107E0,3.9532876E-1,2.6292285E-1,6.222395E-1,8.6281747E-1,5.6607485E-2,6.975248E-1,0E0,7.6535237E-1,8.711132E-1,8.432224E-1,4.6159863E-1,3.0733055E-1,1.2636968E0,7.604139E-1,6.961521E-1,1.2111213E0,6.305411E-1,1.1810479E0,0E0,2.5356507E-1,6.06248E-1,0E0,3.9402044E-1,3.0592442E-2,0E0,2.2808504E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,34,34,35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,43,43,44,44,46,46,47,47,49,49,50,50,52,52],"right_children":[2,4,6,8,10,12,14,16,18,20,-1,22,24,26,28,30,32,34,36,-1,-1,38,40,42,44,46,48,50,52,54,56,58,60,-1,62,64,66,68,70,72,74,76,78,80,82,-1,84,86,-1,88,90,-1,92,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-3.445578E-1,1.136912E0,1.348459E0,-5.461783E-1,-8.6605296E-2,4.447161E-1,3.825173E-1,1.6376211E-1,-2.5497583E-1,6.465325E-1,-7.8352734E-2,-8.498767E-1,-6.225345E-1,5.868313E-2,1.0045053E0,2.8468606E-1,1.9914937E-1,-6.147578E-1,-1.2823372E-1,2.1463741E-1,5.4580107E-2,2.221304E-2,-5.9413753E-2,-8.498767E-1,-1.6673584E-1,-5.9036255E-1,1.6376211E-1,-4.9090233E-1,-1.4774797E0,-2.9627237E-1,5.2817637E-1,-1.1411514E-1,-1.4228727E-1,-1.2468221E-1,-3.3206618E-1,-2.6587364E-1,-2.432298E-1,4.9114
594E-1,1.4704613E-1,4.6192405E-1,-3.2438374E-1,-1.0917766E0,-2.7132162E-1,-2.8642884E-1,6.313125E-1,2.279614E-2,2.9675448E-1,-4.803529E-1,-1.432595E-1,2.753794E-1,-1.4983252E-1,-2.6900547E-2,-4.803529E-1,-9.721004E-3,-9.273901E-2,7.624355E-2,-9.1545664E-2,2.015795E-1,4.7433354E-2,-1.1361251E-1,2.9376216E-2,1.828264E-1,-3.8935177E-2,-1.244139E-1,9.885284E-2,-2.6361346E-1,-6.275968E-2,-2.3018873E-1,-1.1906151E-2,1.3628325E-1,-3.289397E-2,-1.256945E-1,5.9003044E-2,4.452857E-2,-3.1645972E-2,-1.2589061E-2,2.5138047E-1,-3.7852623E-2,1.6019873E-1,-1.2998782E-1,-1.4019746E-2,2.3668118E-1,7.6486412E-3,2.4303696E-1,3.7561364E-2,-3.7899546E-4,1.4171124E-1,6.658252E-2,-1.1155833E-1,-6.461557E-2,-2.1281509E-1,4.701023E-3,1.5695593E-1],"split_indices":[9,6,10,9,4,8,8,6,5,5,0,3,7,9,8,2,6,6,2,0,0,6,2,3,3,0,6,6,0,6,0,5,5,0,1,2,0,2,2,9,3,0,1,4,8,0,0,5,0,1,1,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.3803052E2,9.765076E1,1.4037978E2,9.308566E1,4.5650897E0,1.1335916E2,2.702061E1,7.3768845E1,1.931682E1,3.162733E0,1.4023565E0,6.719032E1,4.6168835E1,1.6575184E1,1.0445428E1,4.4497646E1,2.92712E1,6.536002E0,1.2780818E1,1.7797203E0,1.3830128E0,7.050431E0,6.0139893E1,1.4239382E1,3.1929453E1,5.086174E0,1.1489009E1,5.84281E0,4.602618E0,3.239489E1,1.2102754E1,2.5531695E0,2.6718029E1,1.6522373E0,4.8837647E0,4.396938E0,8.38388E0,4.746412E0,2.304019E0,1.3170948E1,4.6968945E1,3.875987E0,1.0363395E1,2.2321444E1,9.608009E0,1.2556443E0,3.8305297E0,1.0033106E1,1.4559038E0,2.5350053E0,3.3078046E0,1.3750288E0,3.2275896E0,1.5058753E1,1.7336138E1,9.263731E0,2.8390229E0,1.4510038E0,1.1021658E0,2.505541E0,2.4212488E1,3.2422647E0,1.6415002E0,1.7875761E0,2.609362E0,4.0960298E0,4.28785E0,3.5258398E0,1.2205721E0,1.077928E0,1.2260911E0,8.862387E0,4.3085613E0,2.7998072E1,1.8970873E1,1.0557792E0,2.8202078E0,7.0591927E0,3.304202E0,4.6743994E0,1.7647043E1,1.7664576E0,7.841552E0,2.7082872E0,1.1222425E0,4.710879E0,5.3222265E0,1.4289061E0,1.1060992E0,1.4057895E0,1.9020152E0,1.3765793E0,1.8510102E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"93","size_leaf_vector":"1"}},{"base_weights":[-1.6595112E-3,-1.002358E-2,1.0842837E-1,5.389351E-3,-9.157408E-2,1.6145012E-1,-6.269649E-2,-3.655592E-3,2.701499E-1,-1.5879901E-1,2.5194794E-1,1.1122864E-2,2.771252E-1,-1.3976997E-1,1.3945359E-2,-9.4723724E-2,5.3335977E-1,-2.0244633E-1,1.5634574E-1,1.7192855E-1,-6.2109125E-3,-1.2542471E-1,2.052785E-1,4.3114796E-1,-4.2636834E-2,-4.683946E-1,-3.6635984E-2,4.3034777E-1,2.4503763E-4,2.1346276E-1,4.0607117E-3,-5.9902072E-2,-5.137095E-1,-9.5409505E-2,1.9808964E-1,1.5827832E-1,-6.767915E-2,6.0053617E-1,-2.6628891E-2,-7.529229E-2,4.52497E-2,-3.9700884E-2,-1.785633E-1,-3.1111656E-2,1.3796568E-1,1.9639356E-1,6.1193444E-2,-1.9596683E-2,1.0078555E-2,-7.052503E-2,5.4458033E-2,-3.3142067E-2,-1.89239E-1,-1.135208E-2,1.0926241E-1,-7.028715E-2,6.464389E-2,6.161149E-2,2.1207E-1],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":96,"left_children":[1,3,5,7,9,11,-1,13,15,17,19,21,23,25,27,-1,29,31,-1,-1,33,-1,35,37,39,41,43,45,47,-1,-1,49,51,-1,53,-1,55,57,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2
.1733713E-1,2.759965E-1,3.1426468E-1,4.4471002E-1,8.4588623E-1,2.550851E-1,0E0,4.3355623E-1,1.1143923E0,9.8987246E-1,5.091243E-1,7.106543E-1,4.2954975E-1,6.850631E-1,9.151612E-1,0E0,4.1686273E-1,1.2435638E0,0E0,0E0,3.4137663E-1,0E0,5.489166E-1,5.5743945E-1,1.8392757E-1,1.2853885E-1,6.089849E-1,1.2591821E-1,3.44031E-1,0E0,0E0,9.237322E-1,3.5002613E-1,0E0,1.4140175E-1,0E0,2.4130915E-1,4.171455E-2,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,7,7,8,8,9,9,10,10,11,11,12,12,13,13,14,14,16,16,17,17,20,20,22,22,23,23,24,24,25,25,26,26,27,27,28,28,31,31,32,32,34,34,36,36,37,37],"right_children":[2,4,6,8,10,12,-1,14,16,18,20,22,24,26,28,-1,30,32,-1,-1,34,-1,36,38,40,42,44,46,48,-1,-1,50,52,-1,54,-1,56,58,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[1.9354022E0,1.4323081E0,7.664258E-1,1.348459E0,-6.3253975E-1,-1.9705367E-1,-6.269649E-2,-1.0437956E0,-1.0838329E0,9.845981E-1,-3.6573452E-1,-7.447781E-1,-4.5770618E-1,-1.4369774E-1,-9.874513E-1,-9.4723724E-2,1.3372214E-1,1.6376211E-1,1.5634574E-1,1.7192855E-1,-1.0559944E-1,-1.2542471E-1,-5.5560064E-1,2.1463482E-1,-3.2037044E-1,-3.2037044E-1,3.295149E-2,-6.9653356E-1,-4.26067E-1,2.1346276E-1,4.0607117E-3,-3.2037044E-1,-5.1429987E-1,-9.5409505E-2,-1.1001399E0,1.5827832E-1,-2.9598737E-2,-3.113201E-1,-2.6628891E-2,-7.529229E-2,4.52497E-2,-3.9700884E-2,-1.785633E-1,-3.1111656E-2,1.3796568E-1,1.9639356E-1,6.1193444E-2,-1.9596683E-2,1.0078555E-2,-7.052503E-2,5.4458033E-2,-3.3142067E-2,-1.89239E-1,-1.135208E-2,1.0926241E-1,-7.028715E-2,6.464389E-2,6.161149E-2,2.1207E-1],"split_indices":[10,10,2,10,7,2,0,5,9,5,7,3,4,4,5,0,8,6,0,0,7,0,3,1,0,0,3,1,8,0,0,0,1,0,5,0,5,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.3385489E2,2.1818681E2,1.56680765E1,1.8429015E2,3.3896667E1,1.38464985E1,1.8215778E0,1.7915588E2,5.1342616E0,2.879869E1,5.0979757E0,6.498037E0,7.3484616E0,1.9719212E1,1.5943668E2,1.5296717E0,3.6045902E0,2.7721464E1,1.0772274E0,1.7281904E0,3.3697853E0,1.6646919E0,4.833345E0,4.7244515E0,2.62401E0,3.8643355E0,1.5854876E1,4.109916E0,1.5532677E2,2.4097679E0,1.1948223E0,1.9831839E1,7.889624E0,1.1135327E0,2.2562528E0,1.7885252E0,3.0448198E0,3.4464989E0,1.2779526E0,1.1203396E0,1.5036705E0,1.61861E0,2.2457254E0,1.46650715E1,1.1898044E0,1.1140924E0,2.9958239E0,5.2045395E1,1.0328137E2,1.1507936E1,8.323903E0,2.2127297E0,5.6768947E0,1.2513002E0,1.0049524E0,2.0255587E0,1.0192611E0,1.3525025E0,2.0939965E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"59","size_leaf_vector":"1"}},{"base_weights":[1.2571441E-3,-2.1760797E-2,3.9160296E-2,-5.732452E-3,-3.3257148E-1,1.6062622E-1,-4.9389083E-2,3.2755184E-1,-1.4228134E-2,-5.224634E-1,9.630605E-2,1.9297536E-1,-1.2928416E-1,-2.2474247E-1,5.86031E-2,1.7950858E-1,-2.1293832E-2,5.311068E-2,-6.152187E-2,-2.6110268E-1,5.253937E-2,6.743246E-2,4.0911517E-1,2.1818598E-1,-3.272759E-1,-1.2557711E-2,4.1682446E-1,5.80522E-3,4.7162166E-1,-8.411641E-2,2.6561278E-1,-8.634877E-2,1.1489395E-1,1.5989213E-1,-3.047041E-1,-9.662758E-2,5.4075724E-1,-5.7839146E-3,1.2424605E-1,-4.1008052E-1,1.0399105E-1,2.650474E-1,-8.962899E-2,4.8504494E-2,1.9243409E-1,-1.2494437E-1,1.19788805E-2,2.1015416E-1,-9.587005E-3,2.8788142E-3,-5.492944E-2,1.4780451E-1,-1.0188841E-1,2.21522E-3,9.1644436E-2,-1.7608202E-1,7.416704E-2,2.1196397E-1,-6.822214E-2,6.312717E-2,-1.456561E-1,-7.6200396E-2,
1.5779507E-1,2.2453836E-3,-1.3252044E-1,7.810041E-2,-5.655973E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":97,"left_children":[1,3,5,7,9,11,13,15,17,19,-1,21,-1,23,25,-1,-1,27,29,-1,31,33,35,37,39,41,43,45,47,49,51,-1,-1,53,55,-1,57,-1,-1,59,-1,61,63,65,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.0385814E-1,7.2250175E-1,9.5737135E-1,3.990795E-1,1.0505917E0,7.99913E-1,9.9445426E-1,4.206648E-1,4.3891522E-1,1.2244413E0,0E0,9.547652E-1,0E0,9.7773945E-1,8.4293544E-1,0E0,0E0,1.1252141E0,6.1972237E-1,0E0,4.66126E-1,8.7788326E-1,1.4355748E0,2.1047792E-1,1.0687766E0,6.311014E-1,4.2188668E-1,7.688462E-1,6.878593E-1,7.1004343E-1,9.1797614E-1,0E0,0E0,4.4088888E-1,9.457773E-1,0E0,1.6224892E0,0E0,0E0,8.158572E-1,0E0,1.003273E0,7.885979E-1,2.0755172E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,11,11,13,13,14,14,17,17,18,18,20,20,21,21,22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,30,30,33,33,34,34,36,36,39,39,41,41,42,42,43,43],"right_children":[2,4,6,8,10,12,14,16,18,20,-1,22,-1,24,26,-1,-1,28,30,-1,32,34,36,38,40,42,44,46,48,50,52,-1,-1,54,56,-1,58,-1,-1,60,-1,62,64,66,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-2.7132162E-1,-3.0169392E-1,-8.1596833E-1,-1.3231984E0,1.5441067E0,-1.7224395E-1,3.9471725E-1,-2.7735096E-1,2.5842196E-1,-3.3238387E-1,9.630605E-2,-3.2037044E-1,-1.2928416E-1,-6.575297E-1,-5.1127385E-2,1.7950858E-1,-2.1293832E-2,1.3264844E-1,9.9102E-1,-2.6110268E-1,3.4227094E-1,-4.7465166E-1,-8.0026084E-1,4.4686025E-1,1.6935092E0,-2.834341E-2,-9.66626E-4,-8.459839E-1,1.4704613E-1,-2.7735096E-1,1.1105256E0,-8.634877E-2,1.1489395E-1,-8.603547E-1,-1.1411514E-1,-9.662758E-2,3.929833E-1,-5.7839146E-3,1.2424605E-1,-1.9175527E0,1.0399105E-1,1.4028195E0,1.2350967E0,-1.9705367E-1,1.9243409E-1,-1.2494437E-1,1.19788805E-2,2.1015416E-1,-9.587005E-3,2.8788142E-3,-5.492944E-2,1.4780451E-1,-1.0188841E-1,2.21522E-3,9.1644436E-2,-1.7608202E-1,7.416704E-2,2.1196397E-1,-6.822214E-2,6.312717E-2,-1.456561E-1,-7.6200396E-2,1.5779507E-1,2.2453836E-3,-1.3252044E-1,7.810041E-2,-5.655973E-2],"split_indices":[1,1,7,0,10,4,9,9,10,7,0,0,0,4,3,0,0,10,0,0,10,0,4,6,3,1,4,7,2,9,2,0,0,0,5,0,5,0,0,2,0,9,7,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.3168425E2,1.4441148E2,8.7272766E1,1.3829797E2,6.113511E0,3.645388E1,5.0818886E1,2.5041828E0,1.357938E2,4.892648E0,1.2208631E0,3.5202675E1,1.2512028E0,1.8956684E1,3.1862204E1,1.1924626E0,1.3117201E0,5.5973446E1,7.982035E1,2.7283528E0,2.1642954E0,2.309825E1,1.2104428E1,3.3513644E0,1.560532E1,2.7386757E1,4.4754457E0,5.1200035E1,4.7734113E0,7.535837E1,4.461982E0,1.1296538E0,1.0346415E0,1.8958284E1,4.139964E0,1.6260113E0,1.0478416E1,1.9228946E0,1.4284699E0,1.4246345E1,1.3589753E0,5.421157E0,2.1965601E1,2.1560485E0,2.3193972E0,2.9903162E0,4.820972E1,3.0128872E0,1.760524E0,3.9173405E1,3.6184963E1,3.3799107E0,1.0820712E0,9.7723E0,9.185985E0,2.6972504E0,1.4427139E0,8.684333E0,1.7940841E0,1.3503332E0,1.2896011E1,1.8224852E0,3.5986717E0,1.7984167E1,3.981434E0,1.0866586E0,1.0693899E0],"tree_param":{"num_deleted":"0","num_feature":"1
1","num_nodes":"67","size_leaf_vector":"1"}},{"base_weights":[2.0652409E-3,-3.649476E-2,2.9015647E-2,-5.4227512E-2,2.68825E-1,4.0294703E-2,-1.5918875E-1,7.064876E-2,-9.760618E-2,4.8653647E-1,-6.434938E-2,2.5964452E-2,2.6673657E-1,-4.420625E-1,6.119467E-2,-2.3981826E-1,1.8152231E-1,5.812205E-3,-2.2398807E-1,1.8255888E-1,4.4340007E-2,9.366606E-2,-2.9040191E-2,3.3915944E-3,3.916954E-1,-2.4368286E-2,-1.7919533E-1,2.9759708E-1,-9.508598E-2,1.5143421E-1,-5.190422E-1,3.1471306E-1,-2.396898E-1,-6.704487E-2,3.2192734E-1,1.2176634E-2,-4.478096E-1,1.8294454E-1,-6.5565675E-2,-6.084645E-2,1.8895178E-1,6.446264E-2,-6.106286E-2,5.341813E-1,-1.0555691E-2,-1.385608E-2,1.4289771E-1,-5.9660513E-2,1.3827245E-1,-5.4366127E-2,-1.9360349E-1,2.3917967E-2,1.8537547E-1,-1.7357294E-1,7.3580705E-2,1.1145619E-1,-4.2048864E-2,1.9610198E-1,-2.0006081E-2,-7.041508E-2,5.9858922E-2,-1.5882187E-1,3.8774142E-3,7.4940085E-2,-5.479112E-2,4.2945556E-2,-9.159805E-2,-3.367706E-3,-1.5941079E-1,1.1541305E-1,-4.065776E-2,1.9086477E-1,5.0003633E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":98,"left_children":[1,3,5,7,9,11,13,15,17,19,-1,21,23,25,27,29,31,33,35,-1,-1,37,39,41,43,-1,-1,45,-1,47,49,51,53,55,57,59,61,63,65,67,69,-1,-1,71,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[2.3903832E-1,5.2703774E-1,2.9372883E-1,4.959851E-1,6.6872615E-1,4.1003382E-1,5.167256E-1,8.647804E-1,8.826338E-1,7.550788E-2,0E0,4.5686603E-1,2.4678797E-1,1.509921E-1,5.6753504E-1,8.115264E-1,1.0936176E0,8.981312E-1,1.6108512E0,0E0,0E0,7.925116E-1,4.833228E-1,1.9406828E-1,3.2928944E-1,0E0,0E0,2.5382555E-1,0E0,4.784754E-1,5.9728622E-2,9.639797E-1,9.517709E-1,1.0704588E0,9.848563E-1,7.754515E-1,5.9405494E-1,9.119046E-1,1.0676134E0,1.3982083E0,6.1798286E-1,0E0,0E0,3.9788365E-2,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,21,21,22,22,23,23,24,24,27,27,29,29,30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,43,43],"right_children":[2,4,6,8,10,12,14,16,18,20,-1,22,24,26,28,30,32,34,36,-1,-1,38,40,42,44,-1,-1,46,-1,48,50,52,54,56,58,60,62,64,66,68,70,-1,-1,72,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-3.445578E-1,1.136912E0,1.1722991E0,-7.060735E-1,-8.6605296E-2,1.2381474E0,7.681876E-1,-7.269058E-1,-2.3947062E-1,6.465325E-1,-6.434938E-2,7.8226164E-2,7.8226164E-2,-4.1909558E-1,3.9471725E-1,5.9381795E-1,3.240115E-2,-4.26067E-1,-7.4779874E-1,1.8255888E-1,4.4340007E-2,-2.5788262E-1,5.1763475E-1,-1.9705367E-1,3.2031852E-1,-2.4368286E-2,-1.7919533E-1,-1.4293733E-1,-9.508598E-2,2.5842196E-1,-1.0146359E0,-3.03364E-1,-1.0146359E0,-1.0002563E0,-3.2438374E-1,1.9592093E-1,7.310489E-1,1.3154992E0,-1.039405E0,2.851881E-1,2.9675448E-1,6.446264E-2,-6.106286E-2,7.6534563E-1,-1.0555691E-2,-1.385608E-2,1.4289771E-1,-5.9660513E-2,1.3827245E-1,-5.4366127E-2,-1.9360349E-1,2.3917967E-2,1.8537547E-1,-1.7357294E-1,7.3580705E-2,1.1145619E-1,-4.2048864E-2,1.9610198E-1,-2.0006081E-2,-7.041508E-2,5.9858922E-2,-1.5882187E-1,3.8774142E-3,7.4940085E-2,-5.479112E-2,4.2945556E-2,-9.159805E-2,-3.367706E-3,-1.5941079E-1,1.1541305E-1,-4.065776E-2,1.9086477E-1,5.0003633E-2],"split_
indices":[9,6,6,0,4,5,7,1,8,5,0,2,2,7,9,10,1,8,9,0,0,4,6,2,8,0,0,9,0,10,0,3,0,1,3,8,5,8,6,3,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.2800078E2,9.3651825E1,1.3434895E2,8.928841E1,4.3634152E0,1.27494156E2,6.8548017E0,2.2857828E1,6.643058E1,3.0037398E0,1.3596752E0,1.2095429E2,6.5398626E0,2.5613256E0,4.293476E0,5.7088823E0,1.7148945E1,3.70591E1,2.9371479E1,1.6228243E0,1.3809156E0,5.3904232E1,6.705006E1,2.435114E0,4.1047487E0,1.228743E0,1.3325827E0,2.7729542E0,1.520522E0,2.5680938E0,3.1407886E0,1.32211485E1,3.9277973E0,3.0758017E1,6.3010864E0,1.4751777E1,1.4619702E1,3.4443462E1,1.9460773E1,5.914181E1,7.9082475E0,1.1853399E0,1.249774E0,2.889067E0,1.2156818E0,1.202168E0,1.5707862E0,1.3722277E0,1.195866E0,1.5172938E0,1.6234947E0,8.160014E0,5.061134E0,2.1984735E0,1.729324E0,3.8120477E0,2.6945969E1,3.0313075E0,3.269779E0,6.255494E0,8.496284E0,1.2293467E1,2.326236E0,2.938617E1,5.0572915E0,1.0619338E1,8.8414345E0,5.4425972E1,4.715839E0,4.816693E0,3.0915546E0,1.6889532E0,1.2001138E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"73","size_leaf_vector":"1"}},{"base_weights":[1.5480443E-3,-1.5716831E-1,6.045215E-3,6.65235E-2,-3.2950538E-1,-2.586296E-3,1.1351206E-1,-5.002689E-1,4.5390226E-2,4.9986714E-3,-2.754319E-1,-2.6525733E-1,2.2399543E-1,-2.900566E-2,-1.9222583E-1,-5.008736E-3,2.077984E-1,-5.8134323E-1,7.112549E-2,2.1752479E-2,-1.2349465E-1,-6.550194E-2,3.140693E-1,-2.114434E-2,8.160043E-2,3.0377644E-1,-5.2877106E-2,-3.3354465E-2,-2.2442462E-1,4.2819333E-1,-5.0160162E-2,-1.953737E-3,-7.308755E-2,5.7555E-2,-1.6091218E-2,1.19833626E-1,-3.0263538E-2,1.545841E-1,2.7536603E-2],"categories":[],"categories_nodes":[],"categories_segments":[],"categories_sizes":[],"default_left":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"id":99,"left_children":[1,3,5,-1,7,9,11,13,-1,15,17,19,21,-1,-1,23,25,27,-1,-1,-1,-1,29,31,33,35,-1,-1,-1,37,-1,-1,-1,-1,-1,-1,-1,-1,-1],"loss_changes":[1.6239543E-1,4.8245835E-1,2.0521554E-1,0E0,4.6253365E-1,4.266008E-1,7.476699E-1,1.483376E-1,0E0,4.087553E-1,1.0119455E0,2.3612753E-1,5.9747964E-1,0E0,0E0,2.7063537E-1,4.0264213E-1,2.1323824E-1,0E0,0E0,0E0,0E0,6.9987714E-1,5.290242E-1,4.6392465E-1,3.4784323E-1,0E0,0E0,0E0,2.318182E-1,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0,0E0],"parents":[2147483647,0,0,1,1,2,2,4,4,5,5,6,6,7,7,9,9,10,10,11,11,12,12,15,15,16,16,17,17,22,22,23,23,24,24,25,25,29,29],"right_children":[2,4,6,-1,8,10,12,14,-1,16,18,20,22,-1,-1,24,26,28,-1,-1,-1,-1,30,32,34,36,-1,-1,-1,38,-1,-1,-1,-1,-1,-1,-1,-1,-1],"split_conditions":[-9.1293585E-1,-1.5897034E0,1.5020956E0,6.65235E-2,-5.750444E-1,1.3154992E0,6.8749217E-3,-4.732087E-1,4.5390226E-2,1.1289028E0,6.6354454E-1,-2.6587364E-1,-8.4659064E-1,-2.900566E-2,-1.9222583E-1,7.8739315E-1,4.822475E-1,-1.4293733E-1,7.112549E-2,2.1752479E-2,-1.2349465E-1,-6.550194E-2,2.3728786E0,6.465325E-1,-5.547491E-3,3.968685E-1,-5.2877106E-2,-3.3354465E-2,-2.2442462E-1,7.9795814E-1,-5.0160162E-2,-1.953737E-3,-7.308755E-2,5.7555E-2,-1.6091218E-2,1.19833626E-1,-3.0263538E-2,1.545841E-1,2.7536603E-2],"split_indices":[3,7,8,0,1,8,10,6,0,8,9,2,5,0,0,5,6,9,0,0,0,0,8,5,7,1,0,0,0,9,0,0,0,0,0,0,0,0,0],"split_type":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"sum_hessian":[2.2518532E2,5.269328E0,2.19916E2,1.557718E0,3.71161E0,2.0446938E2,1.5446616E1,2.7090003E0,1.00261E0,1.998
941E2,4.5752735E0,3.171817E0,1.2274799E1,1.1354119E0,1.5735884E0,1.9142339E2,8.470725E0,2.7810364E0,1.7942373E0,1.1116438E0,2.0601733E0,1.8357551E0,1.0439044E1,1.6199835E2,2.942502E1,6.9443216E0,1.5264032E0,1.1644317E0,1.6166048E0,8.527252E0,1.911792E0,1.5296744E2,9.030928E0,1.5979491E1,1.344553E1,5.6268816E0,1.3174399E0,6.3513217E0,2.17593E0],"tree_param":{"num_deleted":"0","num_feature":"11","num_nodes":"39","size_leaf_vector":"1"}}]},"name":"gbtree"},"learner_model_param":{"base_score":"[1.964595E-1]","boost_from_average":"1","num_class":"0","num_feature":"11","num_target":"1"},"objective":{"name":"binary:logistic","reg_loss_param":{"scale_pos_weight":"1"}}},"version":[3,1,1]} \ No newline at end of file diff --git a/data/wine_quality/y_test.npy b/data/wine_quality/y_test.npy new file mode 100644 index 0000000..d94dd22 Binary files /dev/null and b/data/wine_quality/y_test.npy differ diff --git a/data/wine_quality/y_train.npy b/data/wine_quality/y_train.npy new file mode 100644 index 0000000..45bedb7 Binary files /dev/null and b/data/wine_quality/y_train.npy differ diff --git a/experiments/__init__.py b/experiments/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/experiments/configs/tabular/dataset_config.yaml b/experiments/configs/tabular/dataset_config.yaml new file mode 100644 index 0000000..e9ea2d1 --- /dev/null +++ b/experiments/configs/tabular/dataset_config.yaml @@ -0,0 +1,41 @@ +# Wine Quality Dataset Configuration + +dataset: + name: "Wine Quality" + uci_id: 186 + + # Data split configuration + split: + test_size: 0.2 + random_state: 42 + stratify: true + + # Target label configuration + # Binary classification: quality scores 7, 8, 9 -> 1, others -> 0 + target: + type: "binary" + positive_classes: [7, 8, 9] + column_name: "quality" + + # Feature information + features: + n_features: 11 + feature_types: + - "fixed acidity" + - "volatile acidity" + - "citric acid" + - "residual sugar" + - "chlorides" + - "free sulfur dioxide" + - "total sulfur dioxide" + - "density" + - "pH" + - "sulphates" + - "alcohol" + all_numerical: true + + # Expected data size + expected_size: + total_samples: 6497 + train_samples: 5197 + test_samples: 1300 diff --git a/experiments/configs/tabular/explainer_config.yaml b/experiments/configs/tabular/explainer_config.yaml new file mode 100644 index 0000000..fd2d24d --- /dev/null +++ b/experiments/configs/tabular/explainer_config.yaml @@ -0,0 +1,131 @@ +# Explainer Configuration for Different Frameworks + +# Default hyperparameters (matching benchmark) +defaults: + n_samples: 64 # For LIME and SHAP + n_steps: 50 # For Integrated Gradients + batch_size: 32 # For batch processing + seed: 42 + +# PnPXAI configuration +pnpxai: + # Hyperparameter optimization config + optuna: + sampler: grid + seed: 42 + num_threads: 1 + show_progress: false + n_trials: 25 + + # Search space for HPO + search_space: + explainer.baseline_fn: + - "mean" + - "zeros" + - "kmeans" + explainer.baseline_fn.n_clusters: + - 10 + - 20 + - 30 + - 40 + - 50 + explainer.epsilon: + - 0.000001 + - 0.0001 + - 0.001 + - 0.01 + - 0.1 + - 1.0 + explainer.noise_level: + - 0.1 + - 0.2 + - 0.3 + - 0.4 + - 0.5 + explainer.n_iter: + - 10 + - 20 + - 30 + postprocessor.0.normalization_method: + - "identity" + - "pos" + - "minmax" + + # Metric configuration for optimization + metrics: + compound: + name: "cmpd" + components: + - metric: "abpc" + weight: 0.7 + - metric: "cmpx" + weight: -0.3 + direction: "maximize" + + # Fixed parameters (not tuned) + fixed_params: + lime: + n_samples: 64 
+ kernel_shap: + n_samples: 64 + integrated_gradients: + n_steps: 50 + + # Background data size for baseline functions + background_data_size: 50 + +# Captum configuration +captum: + # Parameters for each explainer + lime: + n_samples: 64 + + kernel_shap: + n_samples: 64 + + integrated_gradients: + n_steps: 50 + multiply_by_inputs: true + + saliency: + abs: false + + smooth_grad: + nt_type: "smoothgrad" + stdev: 1.0 # default + + input_x_gradient: {} + + lrp: + rule: "EpsilonRule" + +# OmniXAI configuration (XGBoost only) +omnixai: + lime: + num_samples: 64 + + shap: + nsamples: 64 + +# OpenXAI configuration (TabResNet only) +openxai: + lime: + n_samples: 64 + + shap: + n_samples: 64 + + # Other explainers use default params + +# AutoXAI configuration (deprecated) +autoxai: + background_size: 50 + properties: + - "robustness" + - "fidelity" + - "conciseness" + weights: [1, 2, 0.5] + distance: "cosine" + scaling: "Std" + early_stopping: true + improvement_stopping: true diff --git a/experiments/configs/tabular/model_config.yaml b/experiments/configs/tabular/model_config.yaml new file mode 100644 index 0000000..4e12d1e --- /dev/null +++ b/experiments/configs/tabular/model_config.yaml @@ -0,0 +1,51 @@ +# Model Training Configuration + +# Random seeds for reproducibility +seeds: + data_split: 42 # For train/test split + model_training: 0 # For model initialization and training + explanation: 42 # For explanation generation + +# XGBoost configuration +xgb: + model_class: "XGBClassifier" + # Using default parameters from benchmark + params: {} + save_format: "json" + filename: "xgb_model.json" + +# TabResNet configuration +tab_resnet: + num_blocks: 1 + hidden_dim: null # Will be set based on input_dim + + # Training hyperparameters + training: + epochs: 1000 + learning_rate: 0.01 + weight_decay: 0.01 + optimizer: "SGD" + loss_function: "CrossEntropyLoss" + batch_size: null # Full batch training + + save_format: "pth" + filename: "resnet_model.pth" + +# Logistic Regression configuration (optional) +lr: + # Using PyTorch implementation + training: + epochs: 1000 + learning_rate: 0.01 + weight_decay: 0.01 + optimizer: "SGD" + loss_function: "CrossEntropyLoss" + + save_format: "pth" + filename: "lr_model.pth" + +# Common settings +common: + output_dim: 2 # Binary classification + device: "cuda" # or "cpu" + verbose: true diff --git a/experiments/configs/tabular/optuna_config.yaml b/experiments/configs/tabular/optuna_config.yaml new file mode 100644 index 0000000..0c13e43 --- /dev/null +++ b/experiments/configs/tabular/optuna_config.yaml @@ -0,0 +1,22 @@ +# Optuna Hyperparameter Optimization Configuration +# (Copy of benchmark config for exact reproduction) + +sampler: grid +seed: 42 +num_threads: 1 +show_progress: false +n_trials: 25 # If search space is large, total iteration goes to n_trials + +search_space: + explainer.baseline_fn: + ["mean", "zeros", "kmeans"] + explainer.baseline_fn.n_clusters: + [10, 20, 30, 40, 50] + explainer.epsilon: + [0.000001, 0.0001, 0.001, 0.01, 0.1, 1.0] + explainer.noise_level: + [0.1, 0.2, 0.3, 0.4, 0.5] + explainer.n_iter: + [10, 20, 30] + postprocessor.0.normalization_method: + ["identity", "pos", "minmax"] diff --git a/experiments/datasets/__init__.py b/experiments/datasets/__init__.py index 494f706..71a3201 100644 --- a/experiments/datasets/__init__.py +++ b/experiments/datasets/__init__.py @@ -1 +1,2 @@ -from .liver_tumor import LiverTumorDataset, LiverTumorDatasetHf \ No newline at end of file +from .liver_tumor import LiverTumorDataset, LiverTumorDatasetHf 
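The dataset settings above (UCI id 186, quality scores 7, 8, 9 mapped to the positive class, stratified 80/20 split with seed 42) translate into a few lines of standard tooling. The following is a minimal sketch of how such a config could be consumed, not repository code: `prepare_wine_quality` is a hypothetical helper name, and it assumes the `ucimlrepo` package is installed and that the target column is named `quality` as in the config.

```python
# Minimal sketch (not repository code) of consuming dataset_config.yaml.
import numpy as np
from sklearn.model_selection import train_test_split
from ucimlrepo import fetch_ucirepo


def prepare_wine_quality(test_size=0.2, random_state=42):
    ds = fetch_ucirepo(id=186)                        # Wine Quality (uci_id: 186)
    X = ds.data.features.to_numpy(dtype=np.float32)   # 11 numerical features
    quality = ds.data.targets["quality"].to_numpy().ravel()
    y = np.isin(quality, [7, 8, 9]).astype(np.int64)  # positive_classes -> 1, others -> 0
    return train_test_split(X, y, test_size=test_size,
                            random_state=random_state, stratify=y)


X_train, X_test, y_train, y_test = prepare_wine_quality()
```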
+from .aki import AKIDataset, AKI_COLUMNS \ No newline at end of file diff --git a/experiments/datasets/aki.py b/experiments/datasets/aki.py new file mode 100644 index 0000000..7eb9974 --- /dev/null +++ b/experiments/datasets/aki.py @@ -0,0 +1,117 @@ +import numpy as np +from torch.utils.data import Dataset + + +class AKIDataset(Dataset): + def __init__(self, X: np.ndarray, Y: np.ndarray): + self.X = X.astype(np.float32) + self.Y = Y.astype(int) + + self.balanced_X = self.X + self.balanced_Y = self.Y + + def rebalance(self): + aki_ids = np.argwhere(self.Y != 0).ravel() + non_aki_ids = np.argwhere(self.Y == 0).ravel() + + if len(aki_ids) > len(non_aki_ids): + return + + target_len = len(aki_ids) + + non_aki_ids = np.random.choice(non_aki_ids, target_len, replace=False) + rebalanced_ids = np.concatenate((aki_ids, non_aki_ids)) + np.random.shuffle(rebalanced_ids) + self.balanced_X = self.X[rebalanced_ids] + self.balanced_Y = self.Y[rebalanced_ids] + + def __len__(self): + return len(self.balanced_X) + + def __getitem__(self, idx): + return (self.balanced_X[idx], self.balanced_Y[idx]) + + +AKI_COLUMNS = [ + "AKI", + "AKI_STAGE_7DAY", + "CREATININE_MAX", + "CREATININE_MIN", + "CREAT", + "EGFR", + "POTASSIUM_MAX", + "GLUCOSE_MAX", + "PLATELET_MIN", + "BUN_MAX", + "WBC_MIN", + "PLATELET_MAX", + "TEMPC_MEAN", + "GLUCOSE_MEAN", + "PTT_MAX", + "TEMPC_MIN", + "BUN_MIN", + "HEMATOCRIT_MIN", + "SPO2_MEAN", + "MEANBP_MEAN", + "AGE", + "DBSOURCE", + "HEARTRATE_MEAN", + "PT_MAX", + "TEMPC_MAX", + "RESPRATE_MEAN", + "CHLORIDE_MAX", + "GLUCOSE_MIN", + "WBC_MAX", + "DIASBP_MEAN", + "SYSBP_MAX", + "DIASBP_MIN", + "CHLORIDE_MIN", + "SPO2_MIN", + "HEARTRATE_MAX", + "HEMOGLOBIN_MAX", + "SYSBP_MEAN", + "HEMATOCRIT_MAX", + "DIASBP_MAX", + "HEARTRATE_MIN", + "SYSBP_MIN", + "SODIUM_MIN", + "MEANBP_MAX", + "BICARBONATE_MAX", + "MEANBP_MIN", + "SODIUM_MAX", + "ANIONGAP_MAX", + "ANIONGAP_MIN", + "HEMOGLOBIN_MIN", + "LACTATE_MIN", + "BICARBONATE_MIN", + "PTT_MIN", + "PT_MIN", + "BILIRUBIN_MAX", + "RESPRATE_MIN", + "LACTATE_MAX", + "RESPRATE_MAX", + "ALBUMIN_MIN", + "POTASSIUM_MIN", + "INR_MAX", + "ALBUMIN_MAX", + "BILIRUBIN_MIN", + "INR_MIN", + "BANDS_MIN", + "ETHNICITY", + "BANDS_MAX", + "HYPERTENSION", + "DIABETES_UNCOMPLICATED", + "VALVULAR_DISEASE", + "CONGESTIVE_HEART_FAILURE", + "SPO2_MAX", + "ALCOHOL_ABUSE", + "GENDER", + "CARDIAC_ARRHYTHMIAS", + "PERIPHERAL_VASCULAR", + "OBESITY", + "HYPOTHYROIDISM", + "DIABETES_COMPLICATED", + "LIVER_DISEASE", + "DRUG_ABUSE", + "RENAL_FAILURE", +] diff --git a/experiments/models/__init__.py b/experiments/models/__init__.py index dcc83fe..432d58e 100644 --- a/experiments/models/__init__.py +++ b/experiments/models/__init__.py @@ -1 +1,4 @@ -from .liver_tumor import ResNet50LiverTumor \ No newline at end of file +from .liver_tumor import ResNet50LiverTumor +from .aki import AKIClassifier +from .ecg import ResNetPlus, PatchTST +from .tab_resnet import TabResNet \ No newline at end of file diff --git a/experiments/models/aki.py b/experiments/models/aki.py new file mode 100644 index 0000000..fb8f195 --- /dev/null +++ b/experiments/models/aki.py @@ -0,0 +1,39 @@ +from torch import nn, Tensor +from huggingface_hub import PyTorchModelHubMixin +from typing import Optional + + +class AKIClassifier(nn.Module, PyTorchModelHubMixin): + + def __init__(self, input_size: int, n_classes: int): + super(AKIClassifier, self).__init__() + + is_binary = n_classes <= 2 + + layers = [ + self.nn_block(input_size, 256), + self.nn_block(256), + self.nn_block(256), + self.nn_block(256), + 
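`AKIDataset.rebalance` above undersamples the majority (non-AKI) class so that both classes contribute equally many rows. A small usage sketch with synthetic data follows; the array shapes are illustrative only, since the real experiments load preprocessed MIMIC-III features.

```python
# Illustrative usage only: synthetic data in place of the preprocessed MIMIC-III table.
import numpy as np
from torch.utils.data import DataLoader
from experiments.datasets import AKIDataset, AKI_COLUMNS

np.random.seed(0)
n_features = len(AKI_COLUMNS) - 2              # analysis scripts treat AKI_COLUMNS[2:] as inputs
X = np.random.rand(200, n_features)
Y = (np.random.rand(200) < 0.2).astype(int)    # imbalanced labels: roughly 20% positive

ds = AKIDataset(X, Y)
ds.rebalance()                                  # undersample non-AKI rows to match the AKI count
loader = DataLoader(ds, batch_size=1, shuffle=False)
```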
self.nn_block(256), + self.nn_block(256), + self.nn_block(256), + ] + + if is_binary: + layers.extend((nn.Linear(256, 1), nn.Sigmoid(), nn.Flatten(0, -1))) + else: + layers.extend((nn.Linear(256, n_classes), nn.Softmax(dim=-1))) + self.layers = nn.Sequential(*layers) + + def nn_block( + self, in_c: int, out_c: Optional[int] = None, dropout: float = 0.2 + ) -> nn.Module: + return nn.Sequential( + nn.Linear(in_c, out_c or in_c), + nn.ReLU(), + nn.Dropout(dropout), + ) + + def forward(self, x: Tensor) -> Tensor: + return self.layers(x) diff --git a/experiments/models/ecg/__init__.py b/experiments/models/ecg/__init__.py new file mode 100644 index 0000000..88824f2 --- /dev/null +++ b/experiments/models/ecg/__init__.py @@ -0,0 +1,2 @@ +from .patchtst import PatchTST +from .resnet_plus import ResNetPlus diff --git a/experiments/models/ecg/patchtst.py b/experiments/models/ecg/patchtst.py new file mode 100644 index 0000000..fecc95d --- /dev/null +++ b/experiments/models/ecg/patchtst.py @@ -0,0 +1,704 @@ +from typing import Optional + +import torch +from torch import nn, Tensor +import torch.nn.functional as F + +from huggingface_hub import PyTorchModelHubMixin + + +class Transpose(nn.Module): + def __init__(self, *dims, contiguous=False): + super(Transpose, self).__init__() + self.dims, self.contiguous = dims, contiguous + + def forward(self, x): + if self.contiguous: + return x.transpose(*self.dims).contiguous() + else: + return x.transpose(*self.dims) + + def __repr__(self): + if self.contiguous: + return f"{self.__class__.__name__}(dims={', '.join([str(d) for d in self.dims])}).contiguous()" + else: + return ( + f"{self.__class__.__name__}({', '.join([str(d) for d in self.dims])})" + ) + + +pytorch_acts = [ + nn.ELU, + nn.LeakyReLU, + nn.PReLU, + nn.ReLU, + nn.ReLU6, + nn.SELU, + nn.CELU, + nn.GELU, + nn.Sigmoid, + nn.Softplus, + nn.Tanh, + nn.Softmax, +] +pytorch_act_names = [a.__name__.lower() for a in pytorch_acts] + + +def get_act_fn(act, **act_kwargs): + if act is None: + return + elif isinstance(act, nn.Module): + return act + elif callable(act): + return act(**act_kwargs) + idx = pytorch_act_names.index(act.lower()) + return pytorch_acts[idx](**act_kwargs) + + +class RevIN(nn.Module): + def __init__( + self, + c_in: int, + affine: bool = True, + subtract_last: bool = False, + dim: int = 2, + eps: float = 1e-5, + ): + super().__init__() + self.c_in, self.affine, self.subtract_last, self.dim, self.eps = ( + c_in, + affine, + subtract_last, + dim, + eps, + ) + if self.affine: + self.weight = nn.Parameter(torch.ones(1, c_in, 1)) + self.bias = nn.Parameter(torch.zeros(1, c_in, 1)) + + def forward(self, x: Tensor, mode: Tensor): + if mode: + return self.normalize(x) + else: + return self.denormalize(x) + + def normalize(self, x): + if self.subtract_last: + self.sub = x[..., -1].unsqueeze(-1).detach() + else: + self.sub = torch.mean(x, dim=-1, keepdim=True).detach() + self.std = ( + torch.std(x, dim=-1, keepdim=True, unbiased=False).detach() + self.eps + ) + if self.affine: + x = x.sub(self.sub) + x = x.div(self.std) + x = x.mul(self.weight) + x = x.add(self.bias) + return x + else: + x = x.sub(self.sub) + x = x.div(self.std) + return x + + def denormalize(self, x): + if self.affine: + x = x.sub(self.bias) + x = x.div(self.weight) + x = x.mul(self.std) + x = x.add(self.sub) + return x + else: + x = x.mul(self.std) + x = x.add(self.sub) + return x + + +class MovingAverage(nn.Module): + def __init__( + self, + kernel_size: int, + ): + super().__init__() + padding_left = (kernel_size - 1) // 2 + 
padding_right = kernel_size - padding_left - 1 + self.padding = torch.nn.ReplicationPad1d((padding_left, padding_right)) + self.avg = nn.AvgPool1d(kernel_size=kernel_size, stride=1) + + def forward(self, x: Tensor): + return self.avg(self.padding(x)) + + +class SeriesDecomposition(nn.Module): + def __init__( + self, + kernel_size: int, # the size of the window + ): + super().__init__() + self.moving_avg = MovingAverage(kernel_size) + + def forward(self, x: Tensor): + moving_mean = self.moving_avg(x) + residual = x - moving_mean + return residual, moving_mean + + +class _ScaledDotProductAttention(nn.Module): + def __init__(self, d_model, n_heads, attn_dropout=0.0, res_attention=False): + super().__init__() + self.attn_dropout = nn.Dropout(attn_dropout) + self.res_attention = res_attention + head_dim = d_model // n_heads + self.scale = nn.Parameter(torch.tensor(head_dim**-0.5), requires_grad=False) + + def forward(self, q: Tensor, k: Tensor, v: Tensor, prev: Optional[Tensor] = None): + attn_scores = torch.matmul(q, k) * self.scale + + if prev is not None: + attn_scores = attn_scores + prev + + attn_weights = F.softmax(attn_scores, dim=-1) + attn_weights = self.attn_dropout(attn_weights) + + output = torch.matmul(attn_weights, v) + + if self.res_attention: + return output, attn_weights, attn_scores + else: + return output, attn_weights + + +class _MultiheadAttention(nn.Module): + def __init__( + self, + d_model, + n_heads, + d_k=None, + d_v=None, + res_attention=False, + attn_dropout=0.0, + proj_dropout=0.0, + qkv_bias=True, + ): + "Multi Head Attention Layer" + + super().__init__() + d_k = d_v = d_model // n_heads + + self.n_heads, self.d_k, self.d_v = n_heads, d_k, d_v + + self.W_Q = nn.Linear(d_model, d_k * n_heads, bias=qkv_bias) + self.W_K = nn.Linear(d_model, d_k * n_heads, bias=qkv_bias) + self.W_V = nn.Linear(d_model, d_v * n_heads, bias=qkv_bias) + + # Scaled Dot-Product Attention (multiple heads) + self.res_attention = res_attention + self.sdp_attn = _ScaledDotProductAttention( + d_model, + n_heads, + attn_dropout=attn_dropout, + res_attention=self.res_attention, + ) + + # Poject output + self.to_out = nn.Sequential( + nn.Linear(n_heads * d_v, d_model), nn.Dropout(proj_dropout) + ) + + def forward( + self, + Q: Tensor, + K: Optional[Tensor] = None, + V: Optional[Tensor] = None, + prev: Optional[Tensor] = None, + ): + bs = Q.size(0) + if K is None: + K = Q + if V is None: + V = Q + + # Linear (+ split in multiple heads) + q_s = ( + self.W_Q(Q).view(bs, -1, self.n_heads, self.d_k).transpose(1, 2) + ) # q_s: [bs x n_heads x max_q_len x d_k] + k_s = ( + self.W_K(K).view(bs, -1, self.n_heads, self.d_k).permute(0, 2, 3, 1) + ) # k_s: [bs x n_heads x d_k x q_len] - transpose(1,2) + transpose(2,3) + v_s = ( + self.W_V(V).view(bs, -1, self.n_heads, self.d_v).transpose(1, 2) + ) # v_s: [bs x n_heads x q_len x d_v] + + # Apply Scaled Dot-Product Attention (multiple heads) + if self.res_attention: + output, attn_weights, attn_scores = self.sdp_attn(q_s, k_s, v_s, prev=prev) + else: + output, attn_weights = self.sdp_attn(q_s, k_s, v_s) + # output: [bs x n_heads x q_len x d_v], attn: [bs x n_heads x q_len x q_len], scores: [bs x n_heads x max_q_len x q_len] + + # back to the original inputs dimensions + output = ( + output.transpose(1, 2).contiguous().view(bs, -1, self.n_heads * self.d_v) + ) # output: [bs x q_len x n_heads * d_v] + output = self.to_out(output) + + if self.res_attention: + return output, attn_weights, attn_scores + else: + return output, attn_weights + + +class 
Flatten_Head(nn.Module): + def __init__(self, individual, n_vars, nf, pred_dim): + super().__init__() + + if isinstance(pred_dim, (tuple, list)): + pred_dim = pred_dim[-1] + self.individual = individual + self.n = n_vars if individual else 1 + self.nf, self.pred_dim = nf, pred_dim + + if individual: + self.layers = nn.ModuleList() + for i in range(self.n): + self.layers.append( + nn.Sequential(nn.Flatten(start_dim=-2), nn.Linear(nf, pred_dim)) + ) + else: + self.layer = nn.Sequential( + nn.Flatten(start_dim=-2), nn.Linear(nf, pred_dim) + ) + + def forward(self, x: Tensor): + """ + Args: + x: [bs x nvars x d_model x n_patch] + output: [bs x nvars x pred_dim] + """ + if self.individual: + x_out = [] + for i, layer in enumerate(self.layers): + x_out.append(layer(x[:, i])) + x = torch.stack(x_out, dim=1) + return x + else: + return self.layer(x) + + +class _TSTiEncoderLayer(nn.Module): + def __init__( + self, + q_len, + d_model, + n_heads, + d_k=None, + d_v=None, + d_ff=256, + store_attn=False, + norm="BatchNorm", + attn_dropout=0, + dropout=0.0, + bias=True, + activation="gelu", + res_attention=False, + pre_norm=False, + ): + super().__init__() + assert ( + not d_model % n_heads + ), f"d_model ({d_model}) must be divisible by n_heads ({n_heads})" + d_k = d_model // n_heads if d_k is None else d_k + d_v = d_model // n_heads if d_v is None else d_v + + # Multi-Head attention + self.res_attention = res_attention + self.self_attn = _MultiheadAttention( + d_model, + n_heads, + d_k, + d_v, + attn_dropout=attn_dropout, + proj_dropout=dropout, + res_attention=res_attention, + ) + + # Add & Norm + self.dropout_attn = nn.Dropout(dropout) + if "batch" in norm.lower(): + self.norm_attn = nn.Sequential( + Transpose(1, 2), nn.BatchNorm1d(d_model), Transpose(1, 2) + ) + else: + self.norm_attn = nn.LayerNorm(d_model) + + # Position-wise Feed-Forward + self.ff = nn.Sequential( + nn.Linear(d_model, d_ff, bias=bias), + get_act_fn(activation), + nn.Dropout(dropout), + nn.Linear(d_ff, d_model, bias=bias), + ) + + # Add & Norm + self.dropout_ffn = nn.Dropout(dropout) + if "batch" in norm.lower(): + self.norm_ffn = nn.Sequential( + Transpose(1, 2), nn.BatchNorm1d(d_model), Transpose(1, 2) + ) + else: + self.norm_ffn = nn.LayerNorm(d_model) + + self.pre_norm = pre_norm + self.store_attn = store_attn + + def forward(self, src: Tensor, prev: Optional[Tensor] = None): + """ + Args: + src: [bs x q_len x d_model] + """ + + # Multi-Head attention sublayer + if self.pre_norm: + src = self.norm_attn(src) + ## Multi-Head attention + if self.res_attention: + src2, attn, scores = self.self_attn(src, src, src, prev) + else: + src2, attn = self.self_attn(src, src, src) + if self.store_attn: + self.attn = attn + ## Add & Norm + src = src + self.dropout_attn( + src2 + ) # Add: residual connection with residual dropout + if not self.pre_norm: + src = self.norm_attn(src) + + # Feed-forward sublayer + if self.pre_norm: + src = self.norm_ffn(src) + ## Position-wise Feed-Forward + src2 = self.ff(src) + ## Add & Norm + src = src + self.dropout_ffn( + src2 + ) # Add: residual connection with residual dropout + if not self.pre_norm: + src = self.norm_ffn(src) + + if self.res_attention: + return src, scores + else: + return src + + +class _TSTiEncoder(nn.Module): # i means channel-independent + def __init__( + self, + c_in, + patch_num, + patch_len, + n_layers=3, + d_model=128, + n_heads=16, + d_k=None, + d_v=None, + d_ff=256, + norm="BatchNorm", + attn_dropout=0.0, + dropout=0.0, + act="gelu", + store_attn=False, + res_attention=True, + 
pre_norm=False, + ): + + super().__init__() + + self.patch_num = patch_num + self.patch_len = patch_len + + # Input encoding + q_len = patch_num + self.W_P = nn.Linear( + patch_len, d_model + ) # Eq 1: projection of feature vectors onto a d-dim vector space + self.seq_len = q_len + + # Positional encoding + W_pos = torch.empty((q_len, d_model)) + nn.init.uniform_(W_pos, -0.02, 0.02) + self.W_pos = nn.Parameter(W_pos) + + # Residual dropout + self.dropout = nn.Dropout(dropout) + + # Encoder + self.layers = nn.ModuleList( + [ + _TSTiEncoderLayer( + q_len, + d_model, + n_heads=n_heads, + d_k=d_k, + d_v=d_v, + d_ff=d_ff, + norm=norm, + attn_dropout=attn_dropout, + dropout=dropout, + activation=act, + res_attention=res_attention, + pre_norm=pre_norm, + store_attn=store_attn, + ) + for i in range(n_layers) + ] + ) + self.res_attention = res_attention + + def forward(self, x: Tensor): + """ + Args: + x: [bs x nvars x patch_len x patch_num] + """ + + n_vars = x.shape[1] + # Input encoding + x = x.permute(0, 1, 3, 2) # x: [bs x nvars x patch_num x patch_len] + x = self.W_P(x) # x: [bs x nvars x patch_num x d_model] + + x = torch.reshape( + x, (x.shape[0] * x.shape[1], x.shape[2], x.shape[3]) + ) # x: [bs * nvars x patch_num x d_model] + x = self.dropout(x + self.W_pos) # x: [bs * nvars x patch_num x d_model] + + # Encoder + if self.res_attention: + scores = None + for mod in self.layers: + x, scores = mod(x, prev=scores) + else: + for mod in self.layers: + x = mod(x) + x = torch.reshape( + x, (-1, n_vars, x.shape[-2], x.shape[-1]) + ) # x: [bs x nvars x patch_num x d_model] + x = x.permute(0, 1, 3, 2) # x: [bs x nvars x d_model x patch_num] + + return x + + +class _PatchTST_backbone(nn.Module): + def __init__( + self, + c_in, + seq_len, + pred_dim, + patch_len, + stride, + n_layers=3, + d_model=128, + n_heads=16, + d_k=None, + d_v=None, + d_ff=256, + norm="BatchNorm", + attn_dropout=0.0, + dropout=0.0, + act="gelu", + res_attention=True, + pre_norm=False, + store_attn=False, + padding_patch=True, + individual=False, + revin=True, + affine=True, + subtract_last=False, + ): + + super().__init__() + + self.revin = revin + self.revin_layer = RevIN(c_in, affine=affine, subtract_last=subtract_last) + + self.patch_len = patch_len + self.stride = stride + self.padding_patch = padding_patch + patch_num = int((seq_len - patch_len) / stride + 1) + 1 + self.patch_num = patch_num + self.padding_patch_layer = nn.ReplicationPad1d((stride, 0)) + + self.unfold = nn.Unfold(kernel_size=(1, patch_len), stride=stride) + self.patch_len = patch_len + + self.backbone = _TSTiEncoder( + c_in, + patch_num=patch_num, + patch_len=patch_len, + n_layers=n_layers, + d_model=d_model, + n_heads=n_heads, + d_k=d_k, + d_v=d_v, + d_ff=d_ff, + attn_dropout=attn_dropout, + dropout=dropout, + act=act, + res_attention=res_attention, + pre_norm=pre_norm, + store_attn=store_attn, + ) + + # Head + self.head_nf = d_model * patch_num + self.n_vars = c_in + self.individual = individual + self.head = Flatten_Head(self.individual, self.n_vars, self.head_nf, pred_dim) + + def forward(self, z: Tensor): + """ + Args: + z: [bs x c_in x seq_len] + """ + + if self.revin: + z = self.revin_layer(z, torch.tensor(True, dtype=torch.bool)) + + z = self.padding_patch_layer(z) + b, c, s = z.size() + z = z.reshape(-1, 1, 1, s) + z = self.unfold(z) + z = z.permute(0, 2, 1).reshape(b, c, -1, self.patch_len).permute(0, 1, 3, 2) + + z = self.backbone(z) + z = self.head(z) + + if self.revin: + z = self.revin_layer(z, torch.tensor(False, dtype=torch.bool)) + return z 
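The backbone's patching step above (a `ReplicationPad1d` followed by `nn.Unfold` and reshapes) amounts to sliding a window of length `patch_len` with step `stride` over the left-padded series. The shape check below uses `Tensor.unfold` as a stand-in for that code path; the sizes are illustrative, not the trained model's configuration.

```python
# Shape sketch of the patching step (illustrative sizes; not the module's exact code path).
import torch
from torch import nn

bs, c_in, seq_len = 4, 1, 140
patch_len, stride = 16, 8

z = torch.randn(bs, c_in, seq_len)
z = nn.ReplicationPad1d((stride, 0))(z)                        # pad left by `stride`, as above
patches = z.unfold(dimension=-1, size=patch_len, step=stride)  # [bs, c_in, patch_num, patch_len]

patch_num = (seq_len - patch_len) // stride + 1 + 1            # formula used by the backbone
assert patches.shape == (bs, c_in, patch_num, patch_len)
```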
+ + +class PatchTST(nn.Module, PyTorchModelHubMixin): + def __init__( + self, + c_in, + c_out, + seq_len, + pred_dim=None, + n_layers=2, + n_heads=8, + d_model=512, + d_ff=2048, + dropout=0.05, + attn_dropout=0.0, + patch_len=16, + stride=8, + padding_patch=True, + revin=True, + affine=False, + individual=False, + subtract_last=False, + decomposition=False, + kernel_size=25, + activation="gelu", + norm="BatchNorm", + pre_norm=False, + res_attention=True, + store_attn=False, + classification=False, + ): + + super().__init__() + + if pred_dim is None: + pred_dim = seq_len + + self.decomposition = decomposition + if self.decomposition: + self.decomp_module = SeriesDecomposition(kernel_size) + self.model_trend = _PatchTST_backbone( + c_in=c_in, + seq_len=seq_len, + pred_dim=pred_dim, + patch_len=patch_len, + stride=stride, + n_layers=n_layers, + d_model=d_model, + n_heads=n_heads, + d_ff=d_ff, + norm=norm, + attn_dropout=attn_dropout, + dropout=dropout, + act=activation, + res_attention=res_attention, + pre_norm=pre_norm, + store_attn=store_attn, + padding_patch=padding_patch, + individual=individual, + revin=revin, + affine=affine, + subtract_last=subtract_last, + ) + self.model_res = _PatchTST_backbone( + c_in=c_in, + seq_len=seq_len, + pred_dim=pred_dim, + patch_len=patch_len, + stride=stride, + n_layers=n_layers, + d_model=d_model, + n_heads=n_heads, + d_ff=d_ff, + norm=norm, + attn_dropout=attn_dropout, + dropout=dropout, + act=activation, + res_attention=res_attention, + pre_norm=pre_norm, + store_attn=store_attn, + padding_patch=padding_patch, + individual=individual, + revin=revin, + affine=affine, + subtract_last=subtract_last, + ) + self.patch_num = self.model_trend.patch_num + else: + self.model = _PatchTST_backbone( + c_in=c_in, + seq_len=seq_len, + pred_dim=pred_dim, + patch_len=patch_len, + stride=stride, + n_layers=n_layers, + d_model=d_model, + n_heads=n_heads, + d_ff=d_ff, + norm=norm, + attn_dropout=attn_dropout, + dropout=dropout, + act=activation, + res_attention=res_attention, + pre_norm=pre_norm, + store_attn=store_attn, + padding_patch=padding_patch, + individual=individual, + revin=revin, + affine=affine, + subtract_last=subtract_last, + ) + self.patch_num = self.model.patch_num + self.classification = classification + + def forward(self, x): + if self.decomposition: + res_init, trend_init = self.decomp_module(x) + res = self.model_res(res_init) + trend = self.model_trend(trend_init) + x = res + trend + else: + x = self.model(x) + + if self.classification: + x = x.squeeze(-2) + return x \ No newline at end of file diff --git a/experiments/models/ecg/resnet_plus.py b/experiments/models/ecg/resnet_plus.py new file mode 100644 index 0000000..4b804d8 --- /dev/null +++ b/experiments/models/ecg/resnet_plus.py @@ -0,0 +1,233 @@ +from collections import OrderedDict + +import torch +from torch import nn, Tensor +from torch.nn import functional as F + +from huggingface_hub import PyTorchModelHubMixin + + +class Pad1d(nn.ConstantPad1d): + def __init__(self, padding, value=0.0): + super().__init__(padding, value) + + +def same_padding1d(seq_len, ks, stride=1, dilation=1): + "Same padding formula as used in Tensorflow" + p = (seq_len - 1) * stride + (ks - 1) * dilation + 1 - seq_len + return p // 2, p - p // 2 + + +class SameConv1d(nn.Module): + def __init__( + self, + ni: int, + nf: int, + ks: int = 3, + stride: int = 1, + dilation: int = 1, + **kwargs, + ): + super.__init__(self, SameConv1d) + self.ks, self.stride, self.dilation = ks, stride, dilation + self.conv1d_same = 
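A quick smoke-test sketch of the `PatchTST` wrapper in classification mode follows. The channel count, sequence length, and other hyperparameters here are assumptions for illustration; the checkpoint loaded from the Hugging Face Hub in the experiments may use different values.

```python
# Smoke-test sketch with assumed shapes (not necessarily the pretrained checkpoint's config).
import torch
from experiments.models import PatchTST

model = PatchTST(c_in=1, c_out=2, seq_len=140, pred_dim=2, classification=True)
model.eval()
with torch.no_grad():
    logits = model(torch.randn(8, 1, 140))   # [batch, channels, time] -> [batch, pred_dim]
assert logits.shape == (8, 2)
```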
nn.Conv1d( + ni, nf, ks, stride=stride, dilation=dilation, **kwargs + ) + self.weight = self.conv1d_same.weight + self.bias = self.conv1d_same.bias + self.pad = Pad1d + + def forward(self, x: Tensor) -> Tensor: + self.padding = same_padding1d( + x.shape[-1], self.ks, dilation=self.dilation + ) # stride=self.stride not used in padding calculation! + return self.conv1d_same(self.pad(self.padding)(x)) + + +def normal_(tensor: Tensor, mean: float = 0.0, std: float = 1.0) -> Tensor: + if torch.overrides.has_torch_function_variadic(tensor): + return torch.overrides.handle_torch_function( + normal_, (tensor,), tensor=tensor, mean=mean, std=std + ) + with torch.no_grad(): + return tensor.normal_(mean, std) + + +def init_linear(m: nn.Module, act_func=None, init="auto", bias_std=0.01): + if getattr(m, "bias", None) is not None and bias_std is not None: + if bias_std != 0: + normal_(m.bias, 0, bias_std) + else: + m.bias.data.zero_() + + if init == "auto" and act_func in (F.relu_, F.leaky_relu_): + init = torch.nn.init.kaiming_uniform_ + + if callable(init): + init(m.weight) + + +def Conv1d( + ni: int, + nf: int, + kernel_size: int = None, + stride: int = 1, + padding="same", + dilation: int = 1, + init="auto", + bias_std: float = 0.01, + **kwargs, +): + if kernel_size % 2 == 1: + conv = nn.Conv1d( + ni, + nf, + kernel_size, + stride=stride, + padding=kernel_size // 2 * dilation, + dilation=dilation, + **kwargs, + ) + else: + conv = SameConv1d( + ni, nf, kernel_size, stride=stride, dilation=dilation, **kwargs + ) + + init_linear(conv, None, init=init, bias_std=bias_std) + return conv + + +class ConvBlock(nn.Sequential): + def __init__( + self, + ni: int, + nf: int, + kernel_size: int = None, + stride: int = 1, + padding="same", + bias=None, + bias_std: float = 0.01, + bn_1st: bool = True, + act=nn.ReLU, + act_kwargs={}, + init="auto", + dropout=0.0, + xtra=None, + **kwargs, + ): + layers = [] + + conv = Conv1d( + ni, nf, kernel_size, bias=bias, stride=stride, padding=padding, **kwargs + ) + act = None if act is None else act(**act_kwargs) + init_linear(conv, act, init=init, bias_std=bias_std) + + layers += [conv] + act_bn = [] + if act is not None: + act_bn.append(act) + + act_bn.append(self._get_norm(nf)) + + if bn_1st: + act_bn.reverse() + + if dropout: + layers += [nn.Dropout(dropout)] + layers += act_bn + if xtra: + layers.append(xtra) + super().__init__(*layers) + + def _get_norm(self, nf, **kwargs) -> nn.Module: + bn = nn.BatchNorm1d(nf, **kwargs) + if bn.affine: + bn.bias.data.fill_(1e-3) + bn.weight.data.fill_(1.0) + return bn + + +class Flatten(nn.Module): + def __init__(self): + super.__init__(self, Flatten) + + def forward(self, x: Tensor) -> Tensor: + return x.view(x.size(0), -1) + + +class ResBlockPlus(nn.Module): + def __init__( + self, + ni, + nf, + ks=[7, 5, 3], + bn_1st=True, + act=nn.ReLU, + act_kwargs={}, + ): + super(ResBlockPlus, self).__init__() + self.convblock1 = ConvBlock( + ni, nf, ks[0], bn_1st=bn_1st, act=act, act_kwargs=act_kwargs + ) + self.convblock2 = ConvBlock( + nf, nf, ks[1], bn_1st=bn_1st, act=act, act_kwargs=act_kwargs + ) + self.convblock3 = ConvBlock(nf, nf, ks[2], act=None) + + self.shortcut = ConvBlock(ni, nf, 1, act=None) + self.act = act(**act_kwargs) + + self._init_cnn(self) + + def _init_cnn(self, m): + if getattr(self, "bias", None) is not None: + nn.init.constant_(self.bias, 0) + if isinstance(self, (nn.Conv1d, nn.Conv2d, nn.Conv3d, nn.Linear)): + nn.init.kaiming_normal_(self.weight) + for l in m.children(): + self._init_cnn(l) + + def forward(self, x): + 
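For even kernel sizes, `Conv1d` above falls back to `SameConv1d`, whose padding comes from `same_padding1d`. The snippet below is a quick check of that arithmetic with invented sizes, assuming the function is importable from this module.

```python
# Quick check of the "same" padding arithmetic for an even kernel (illustrative sizes).
import torch
from torch import nn
from experiments.models.ecg.resnet_plus import same_padding1d

left, right = same_padding1d(82, 4)               # p = 81 + 3 + 1 - 82 = 3 -> (1, 2)
x = nn.functional.pad(torch.randn(1, 8, 82), (left, right))
y = nn.Conv1d(8, 8, kernel_size=4)(x)             # 85 - 4 + 1 = 82: length preserved
assert y.shape[-1] == 82
```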
res = x + x = self.convblock1(x) + x = self.convblock2(x) + x = self.convblock3(x) + + x = x + self.shortcut(res) + x = self.act(x) + return x + + +class AdaptiveConcatPool1d(nn.Module): + def __init__(self, size=None): + super.__init__(self, AdaptiveConcatPool1d) + self.size = size or 1 + self.ap = nn.AdaptiveAvgPool1d(self.size) + self.mp = nn.AdaptiveMaxPool1d(self.size) + + def forward(self, x): + return torch.cat([self.mp(x), self.ap(x)], 1) + + +class GAP1d(nn.Module): + def __init__(self, output_size=1): + super(GAP1d, self).__init__() + self.gap = nn.AdaptiveAvgPool1d(output_size) + + def forward(self, x: Tensor) -> Tensor: + x = self.gap(x) + x = x.reshape(x.shape[0], -1) + return x + + +class ResNetPlus(nn.Sequential, PyTorchModelHubMixin): + def __init__(self, c_in: int, c_out: int, nf: int = 64, **kwargs): + + resblock1 = ResBlockPlus(c_in, nf, **kwargs) + resblock2 = ResBlockPlus(nf, nf * 2, **kwargs) + resblock3 = ResBlockPlus(nf * 2, nf * 2, **kwargs) + backbone = nn.Sequential(resblock1, resblock2, resblock3) + + head = nn.Sequential(GAP1d(1), nn.Linear(nf * 2, c_out)) + super().__init__(OrderedDict([("backbone", backbone), ("head", head)])) diff --git a/experiments/models/tab_resnet.py b/experiments/models/tab_resnet.py new file mode 100644 index 0000000..d1f2a5b --- /dev/null +++ b/experiments/models/tab_resnet.py @@ -0,0 +1,79 @@ +import torch +import torch.nn as nn +import numpy as np + +class ResNetBlock(nn.Module): + def __init__(self, in_features, out_features): + super(ResNetBlock, self).__init__() + self.bn = nn.BatchNorm1d(in_features) + self.fc1 = nn.Linear(in_features, out_features) + self.fc2 = nn.Linear(out_features, out_features) + self.dropout = nn.Dropout(0.2) + + def forward(self, x): + # if x.ndim >= 3: + # x = x.squeeze(1) + y = torch.relu(self.fc1(self.bn(x))) + y = self.dropout(y) + y = self.fc2(y) + y = self.dropout(y) + return torch.add(x, y) + + +class TabResNet(nn.Module): + def __init__(self, in_features, out_features, num_blocks=1, embedding_dim=128): + super(TabResNet, self).__init__() + self.embedding = nn.Linear(in_features, embedding_dim) + self.res_blocks = [] + for i in range(num_blocks): + self.res_blocks.append(ResNetBlock(embedding_dim, embedding_dim)) + self.res_blocks = nn.ModuleList(self.res_blocks) + self.bn = nn.BatchNorm1d(embedding_dim) + self.fc = nn.Linear(embedding_dim, out_features) + + def network(self, x): + x = self.embedding(x) + for block in self.res_blocks: + x = block(x) + x = torch.relu(self.bn(x)) + x = self.fc(x) + return x + + def forward(self, x): + return torch.softmax(self.network(x), dim=1) + + def predict_proba(self, x): + # Currently used by SHAP + input = x if torch.is_tensor(x) else torch.from_numpy(np.array(x)) + return self.forward(input.float()).detach().numpy() + + def predict(self, x, argmax=False): + # Currently used by LIME + input = torch.squeeze(x) if torch.is_tensor(x) else torch.from_numpy(np.array(x)) + output = self.forward(input.float()).detach().numpy() + return output.argmax(axis=-1) if argmax else output + + + +class LogisticRegression(torch.nn.Module): + def __init__(self, input_dim, output_dim): + super(LogisticRegression, self).__init__() + self.linear = torch.nn.Linear(input_dim, output_dim) + + def network(self, x): + return self.linear(x) + + def forward(self, x): + return torch.softmax(self.network(x), dim=1) + + def predict_proba(self, x): + # Currently used by SHAP + input = x if torch.is_tensor(x) else torch.from_numpy(np.array(x)) + return 
self.forward(input.float()).detach().numpy() + + def predict(self, x, argmax=False): + # Currently used by LIME + input = torch.squeeze(x) if torch.is_tensor(x) else torch.from_numpy(np.array(x)) + output = self.forward(input.float()).detach().numpy() + return output.argmax(axis=-1) if argmax else output + diff --git a/experiments/scripts/__init__.py b/experiments/scripts/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/experiments/scripts/analyze_aki_hpo.py b/experiments/scripts/analyze_aki_hpo.py new file mode 100644 index 0000000..3aa589e --- /dev/null +++ b/experiments/scripts/analyze_aki_hpo.py @@ -0,0 +1,474 @@ +import os +import gc +from argparse import ArgumentParser, Namespace +from collections import OrderedDict +from typing import Sequence, Type + +import matplotlib.pyplot as plt +from matplotlib.axes import Axes + +from tqdm.auto import tqdm + +from sklearn.metrics import auc + +import numpy as np + +import torch +from torch import nn +from torch.utils.data import DataLoader + +from pnpxai import Experiment +from pnpxai.explainers import ( + Gradient, + GradientXInput, + IntegratedGradients, + KernelShap, + Lime, + LRPEpsilonAlpha2Beta1, + LRPEpsilonGammaBox, + LRPEpsilonPlus, + LRPUniformEpsilon, + SmoothGrad, + VarGrad, + RAP, + Explainer, +) + +from pnpxai import Experiment, AutoExplanationForTSClassification +from pnpxai.core._types import DataSource + +from experiments.datasets import AKI_COLUMNS +from experiments.utils import set_seed, get_aki_model_from_hf, get_aki_dataset + +import json + + +def get_attrs( + explainer: Explainer, data: DataSource, device: torch.device +) -> np.ndarray: + attrs = [] + for batch in tqdm(data): + x, y = [datum.to(device) for datum in batch] + cur_attrs = explainer.attribute(x, y).detach().cpu().numpy() + attrs.append(cur_attrs) + + return np.concatenate(attrs, axis=0) + + +def get_optimized_attrs( + args: Namespace, + expr: Experiment, + data_ids: Sequence[int], + explainer_id: int, + metric_id: int, + device: torch.device, +) -> np.ndarray: + attrs = [] + metric = expr.manager.get_metrics([metric_id])[0][0] + explainer = expr.manager.get_explainers([explainer_id])[0][0] + n_successful = 0 + + for data_id in data_ids: + try: + optimized = expr.optimize( + data_ids=[data_id], + explainer_id=explainer_id, + metric_id=metric_id, + direction="maximize", + sampler="tpe", # Literal['tpe','random'] + n_trials=args.n_trials, + ) + + datum, _ = expr.manager.get_data([data_id]) + datum = next(iter(datum)) + datum_in, datum_tgt = datum + + attr = optimized.explainer.attribute( + inputs=datum_in.to(device), targets=datum_tgt.to(device) + ) + attr = optimized.postprocessor(attr) + attr = attr.clamp(min=-1 + 1e-9, max=1 - 1e-9).detach().cpu().numpy() + + attrs.append(attr) + + del optimized + n_successful += 1 + except Exception as e: + print( + f"[FAILED!!!] 
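The `predict_proba`/`predict` adapters above exist so that model-agnostic explainers can call the PyTorch models on NumPy arrays. The sketch below shows one way such calls could look with `shap` and `lime`; the data is random, and the exact call sites in the benchmark scripts may differ.

```python
# Hedged usage sketch: model-agnostic explainers call the NumPy adapters above.
import numpy as np
import shap
from lime.lime_tabular import LimeTabularExplainer
from experiments.models import TabResNet

np.random.seed(0)
X_train = np.random.rand(50, 11).astype(np.float32)   # illustrative wine-quality-sized data
X_test = np.random.rand(5, 11).astype(np.float32)

model = TabResNet(in_features=11, out_features=2).eval()

shap_explainer = shap.KernelExplainer(model.predict_proba, X_train)
shap_values = shap_explainer.shap_values(X_test[:1], nsamples=64)

lime_explainer = LimeTabularExplainer(X_train, mode="classification")
lime_exp = lime_explainer.explain_instance(X_test[0], model.predict, num_samples=64)
```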
Metric: {metric.__class__.__name__}; Explainer: {explainer.__class__.__name__} with error:\n{e}" + ) + gc.collect() + torch.cuda.empty_cache() + + return np.concatenate(attrs, axis=0) + + +def get_topk_attrs(attrs: np.ndarray, filepath: str): + data_cols = np.array(AKI_COLUMNS[2:]) + n_data_cols = len(data_cols) + + all_in_k = [] + any_in_k = [] + k_cols = [] + + for k in range(2, n_data_cols): + top_ids = attrs.argsort(axis=-1).astype(int) + topk_ids = top_ids[:, -k:] + all_occur = ( + np.isin(topk_ids, [0, 1, 2]).any(-1) # CREAT + & np.isin(topk_ids, [3]).any(-1) # EGFR + ).sum() + any_occur = np.isin(topk_ids, [0, 1, 2, 3, 7, 14, 78]).any(-1).sum() + all_in_k.append(all_occur.item()) + any_in_k.append(any_occur.item()) + + top_col_ids, top_cols_counts = np.unique(topk_ids, return_counts=True) + ids = np.argpartition(-top_cols_counts, kth=k)[:k] + + cur_k_top_cols = dict( + zip( + data_cols[top_col_ids[ids]].tolist(), + top_cols_counts[ids].tolist(), + ) + ) + cur_k_top_cols = OrderedDict( + sorted(cur_k_top_cols.items(), key=lambda x: x[1], reverse=True) + ) + + k_cols.append(cur_k_top_cols) + + dir_path = os.path.dirname(filepath) + if not os.path.exists(dir_path): + os.makedirs(dir_path) + + with open(os.path.join(f"{filepath}.json"), "w") as f: + json.dump({"all_in_k": all_in_k, "any_in_k": any_in_k, "k_cols": k_cols}, f) + + +def analyze( + args: Namespace, + expr: AutoExplanationForTSClassification, + device: torch.device, + data_ids_relative: Sequence, +): + results_root = "results/hpo_analysis_aki" + out_topk_path = os.path.join(results_root, "topk") + out_attr_path = os.path.join(results_root, "attr") + + metric_id = 1 # metric_id to be used as objective: AbPC + + orig_data, _ = expr.manager.get_data() + + expr.predict_batch(data_ids_relative) + + for explainer, explainer_id in zip(*expr.manager.get_explainers()): + exp_name = explainer.__class__.__name__ + + print(f"[{exp_name}] Getting optimized attrs") + attrs = get_optimized_attrs( + args, expr, data_ids_relative, explainer_id, metric_id, device + ) + + attr_dir = os.path.join(out_attr_path, explainer.__class__.__name__) + os.makedirs(attr_dir, exist_ok=True) + + attr_path = os.path.join(attr_dir, "optimized.npy") + np.save(attr_path, attrs) + + print(f"[{exp_name}] Plotting original topk attrs") + get_topk_attrs(attrs, os.path.join(out_topk_path, f"{exp_name}_optimized")) + + print(f"[{exp_name}] Getting original attrs") + attrs = get_attrs(explainer, orig_data, device) + + os.makedirs(attr_dir, exist_ok=True) + attr_path = os.path.join(attr_dir, "original.npy") + np.save(attr_path, attrs) + + get_topk_attrs(attrs, os.path.join(out_topk_path, exp_name)) + + +def filter_and_normalize(data: np.ndarray) -> np.ndarray: + data = data[~np.isnan(data).any(axis=-1)] + data = data[~(data == 0).all(axis=-1)] + + data_min = np.min(data, axis=-1, keepdims=True) + data_max = np.max(data, axis=-1, keepdims=True) + data = (data - data_min) / (data_max - data_min) + return data + + +def get_rma(data: np.ndarray, top_columns: Sequence) -> float: + data = filter_and_normalize(data) + + gt_sum = data[:, top_columns].sum(axis=-1) + all_sum = data.sum(axis=-1) + + rma = gt_sum / all_sum + rma = rma[~np.isnan(rma)] + rma = rma.mean().item() + + return rma + + +def get_rra(data: np.ndarray, top_columns: Sequence) -> float: + data = filter_and_normalize(data) + + data = np.nan_to_num(data, nan=0) + n_top_cols = len(top_columns) + sorted_ids = data.argsort(axis=-1)[:, -n_top_cols:] + rra = np.isin(sorted_ids, top_columns, 
assume_unique=True).astype(int).sum(axis=-1) + rra = rra / len(top_columns) + rra = rra.mean().item() + return rra + + +def get_auc(data: Sequence) -> float: + data_len = len(data) + x = np.arange(data_len) + y = np.array(data) / data_len + return auc(x, y) + + +def plot_explainers_comparison_bar_chart( + ax: Axes, + explainer_names: Sequence, + orig_val_getter: callable, + opt_val_getter: callable, + explainer_meta_map: dict, +) -> Axes: + w, x = 0.4, np.arange(len(explainer_names)) + for idx, explainer_name in enumerate(explainer_names): + orig_val = orig_val_getter(explainer_name) + opt_val = opt_val_getter(explainer_name) + + ax.bar( + x[idx] - w / 2, + orig_val, + width=w, + color=explainer_meta_map[explainer_name][1] + "40", + edgecolor="#d9d9d9", + linewidth=2, + ) + ax.bar( + x[idx] + w / 2, + opt_val, + width=w, + label=explainer_meta_map[explainer_name][0], + color=explainer_meta_map[explainer_name][1], + edgecolor="#808080", + linewidth=2, + ) + + return ax + + +def visualize(): + plt.rcParams["text.usetex"] = True + plt.rcParams["font.family"] = "Times New Roman" + plt.rcParams["pdf.fonttype"] = 42 + plt.rcParams["mathtext.fontset"] = "stix" + plt.rcParams["axes.unicode_minus"] = False + + EXPLAINER_MAP = { + "Gradient": ("Gradient", "#aec7e8"), + "GradientXInput": ("Gradient$\\times$Input", "#ffbb78"), + "IntegratedGradients": ("Int. Gradients", "#d62628"), + "KernelShap": ("KernelSHAP", "#ff9896"), + "Lime": ("Lime", "#c7c7c7"), + "LRPEpsilonAlpha2Beta1": ("LRP-$\\epsilon \\alpha_2 \\beta_1$", "#c5b1d5"), + "LRPEpsilonGammaBox": ("LRP-$\\epsilon \gamma \\mathcal{B}$", "#8c564b"), + "LRPEpsilonPlus": ("LRP-$\\epsilon^+$", "#e377c2"), + "LRPUniformEpsilon": ("LRP-Uniform-$\\epsilon$", "#f7b6d2"), + "RAP": ("RAP", "#bcbd22"), + "SmoothGrad": ("SmoothGrad", "#16becf"), + "VarGrad": ("VarGrad", "#9ddae5"), + } + + TOP_COLUMNS = [ + 0, # CREAT + 1, # CREAT + 2, # CREAT + 3, # EGFR + 7, # BUN + 14, # BUN + 78, # RENAL_DISEASE + ] + + fig = plt.figure(figsize=(15, 5)) + axes: Sequence[Axes] + axes = fig.subplots(1, 3) + + y_ticks = np.arange(0, 11, 2) / 10 + y_label_kwargs = {"fontname": "Times New Roman", "size": 16, "fontweight": "bold"} + + attr_path = "results/hpo_analysis_aki/topk" + + exp_top_k_feats = {} + exp_all_in_k = {} + + exp_top_k_feats_optimized = {} + exp_all_in_k_optimized = {} + + def read_file(filepath): + with open(os.path.join(filepath), "r") as f: + data = json.loads(f.read()) + n_items = max(data["any_in_k"]) + all_in_k = [datum / n_items for datum in data["all_in_k"]] + top_k_feats = list(data["k_cols"][5].keys()) + return top_k_feats, all_in_k + + for file in os.listdir(attr_path): + if not file.endswith(".json"): + continue + + file_data = read_file(os.path.join(attr_path, file)) + + explainer_name = file.split(".")[0] + is_optimized = "optimized" in explainer_name + if is_optimized: + explainer_name = explainer_name[:-10] + ( + exp_top_k_feats_optimized[explainer_name], + exp_all_in_k_optimized[explainer_name], + ) = file_data + else: + exp_top_k_feats[explainer_name], exp_all_in_k[explainer_name] = file_data + + attr_path = "results/hpo_analysis_aki/attr" + + exp_attrs_orig = {} + exp_attrs_opt = {} + + for explainer in os.listdir(attr_path): + exp_attrs_opt[explainer] = np.load( + os.path.join(attr_path, explainer, "optimized.npy") + ) + exp_attrs_orig[explainer] = np.load( + os.path.join(attr_path, explainer, "original.npy") + ) + + explainer_names = sorted( + list(set(exp_attrs_orig.keys()).intersection(set(exp_attrs_opt.keys()))) + ) + + axes[0] = 
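`get_rma` and `get_rra` above measure how much attribution mass, and how many of the top-ranked features, fall on the clinically expected columns. The toy calculation below illustrates both scores with invented numbers and no repository code.

```python
# Toy illustration of the two localization scores (invented numbers).
import numpy as np

attrs = np.array([[0.5, 0.1, 0.3, 0.1],
                  [0.4, 0.3, 0.2, 0.1]])                      # 2 samples x 4 features, normalized
gt = [0, 2]                                                    # "ground-truth" feature indices

rma = (attrs[:, gt].sum(-1) / attrs.sum(-1)).mean()            # relevance mass accuracy: 0.70
topk = attrs.argsort(axis=-1)[:, -len(gt):]                    # top-|gt| features per sample
rra = (np.isin(topk, gt).sum(-1) / len(gt)).mean()             # relevance rank accuracy: 0.75
```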
plot_explainers_comparison_bar_chart( + axes[0], + explainer_names, + lambda name: get_auc(exp_all_in_k[name]), + lambda name: get_auc(exp_all_in_k_optimized[name]), + EXPLAINER_MAP, + ) + axes[0].set_ylabel("AUC of Recall@K", **y_label_kwargs) + + axes[1] = plot_explainers_comparison_bar_chart( + axes[1], + explainer_names, + lambda name: get_rra(exp_attrs_orig[name], TOP_COLUMNS), + lambda name: get_rra(exp_attrs_opt[name], TOP_COLUMNS), + EXPLAINER_MAP, + ) + axes[1].set_ylabel("Relevance Mass Accuracy", **y_label_kwargs) + + axes[2] = plot_explainers_comparison_bar_chart( + axes[2], + explainer_names, + lambda name: get_rra(exp_attrs_orig[name], TOP_COLUMNS), + lambda name: get_rra(exp_attrs_opt[name], TOP_COLUMNS), + EXPLAINER_MAP, + ) + axes[2].set_ylabel("Relevance Rank Accuracy", **y_label_kwargs) + + for ax in axes: + ax.grid(True) + ax.set_xticks([]) + ax.set_yticks(y_ticks) + ax.set_title("Attribution Methods", fontsize=20) + + handles, labels = axes[0].get_legend_handles_labels() + # fig.legend(handles, labels, loc='lower center', bbox_to_anchor=(0.34, -0.03), fancybox=True, ncol=5) + fig.legend( + handles, + labels, + loc="lower center", + bbox_to_anchor=(0.5, -0.08), + fancybox=True, + ncol=6, + ) + + fig.fontsize = 12 + fig.tight_layout() + fig.savefig( + "results/hpo_analysis_aki/explanations_summary.pdf", bbox_inches="tight" + ) + + +def main(): + """Main execution function.""" + # arguments + parser = ArgumentParser() + parser.add_argument( + "--data_path", + default="data/mimiciii/formatted/data.csv", + type=str, + help="Path to preprocessed MIMIC III dataset", + ) + parser.add_argument( + "--n_trials", + default=20, + type=int, + help="Number of trials for hyper-parameter optimization", + ) + parser.add_argument( + "--analyze", + action="store_true", + help="Enable analysis mode", + ) + parser.add_argument( + "--visualize", + action="store_true", + help="Enable visualization mode", + ) + args = parser.parse_args() + + # setup + set_seed(42) + batch_size = 1 + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + model = get_aki_model_from_hf(repo_id="enver1323/aki-classifier") + model = model.to(device) + model.eval() + + dataset = get_aki_dataset(args.data_path) + if not dataset: + raise ValueError(f"Could not load dataset containing") + + data_ids_relative = range(len(dataset)) + + loader = DataLoader( + dataset, + batch_size=batch_size, + pin_memory=True, + num_workers=2, + shuffle=False, + ) + + # create auto explanation + expr = AutoExplanationForTSClassification( + model=model, + data=loader, + input_extractor=lambda batch: batch[0].to(device), + label_extractor=lambda batch: batch[-1].to(device), + target_extractor=lambda outputs: outputs.argmax(-1).to(device), + target_labels=False, + ) + + if args.analyze: + analyze(args, expr, device, data_ids_relative) + + if args.visualize: + visualize() + + +if __name__ == "__main__": + main() diff --git a/experiments/scripts/analyze_ecg_hpo.py b/experiments/scripts/analyze_ecg_hpo.py new file mode 100644 index 0000000..5322b36 --- /dev/null +++ b/experiments/scripts/analyze_ecg_hpo.py @@ -0,0 +1,368 @@ +import os +import gc +from argparse import ArgumentParser, Namespace +from typing import Union, Type, Sequence, Optional, List + +import torch +from torch import Tensor, nn +from torch.utils.data import Dataset, DataLoader + +from experiments.models import PatchTST, ResNetPlus +from experiments.utils import ( + get_ecg_dataset_from_hf, + get_ecg_patchtst_from_hf, + get_ecg_resnet_from_hf, +) + +from captum 
import attr as captum_exp + +from pnpxai import Experiment +from pnpxai.core.modality import TimeSeriesModality +from pnpxai import explainers as pnp_exp +from pnpxai.evaluator.metrics import Metric, AbPC, Complexity, Sensitivity + +MODEL_GENERATORS = { + "patchtst": get_ecg_patchtst_from_hf, + "resnet_plus": get_ecg_resnet_from_hf, +} + +PNPXAI_EXPLAINER_TYPES = { + pnp_exp.Gradient: "Gradient", + pnp_exp.GradientXInput: "GradXInput", + pnp_exp.IntegratedGradients: "IntegratedGradients", + pnp_exp.KernelShap: "KernelSHAP", + pnp_exp.Lime: "LIME", + pnp_exp.LRPEpsilonAlpha2Beta1: "LRP_EA2B1", + pnp_exp.LRPEpsilonGammaBox: "LRP_EGB", + pnp_exp.LRPEpsilonPlus: "LRP_E", + pnp_exp.LRPUniformEpsilon: "LRP_UniformE", + pnp_exp.SmoothGrad: "SmoothGrad", + pnp_exp.VarGrad: "VarGrad", +} + + +CAPTUM_EXPLAINER_TYPES = { + captum_exp.InputXGradient: "GradXInput", + captum_exp.IntegratedGradients: "IntegratedGradients", + captum_exp.KernelShap: "KernelSHAP", + captum_exp.Lime: "LIME", +} + + +class Composite(Metric): + def __init__(self, metrics: Sequence[Metric], agg_func: callable): + self.metrics = metrics + self.agg_func = agg_func + + def set_explainer(self, explainer: pnp_exp.Explainer) -> "Composite": + metrics = self.metrics + self.metrics = [] + + clone = self.copy() + + self.metrics = metrics + clone.metrics = [metric.set_explainer(explainer) for metric in metrics] + + return clone + + def evaluate( + self, + inputs: Tensor, + targets: Tensor, + attributions: Optional[Tensor] = None, + **kwargs, + ) -> Tensor: + return self.agg_func( + *[ + metric.evaluate( + inputs=inputs, targets=targets, attributions=attributions, **kwargs + ) + for metric in self.metrics + ] + ) + + +class CaptumExplainerWrapper: + def __init__(self, explainer_type: Type[captum_exp.Attribution], model: nn.Module): + self.model = model + self.explainer: captum_exp.Attribution = explainer_type(self.model) + + def attribute(self, inputs: Tensor, targets: Tensor) -> Tensor: + return self.explainer.attribute(inputs=inputs, target=targets) + + def __call__(self, *args, **kwargs) -> Tensor: + return self.attribute(*args, **kwargs) + + +def metric_to_str(metric: Metric) -> str: + formatter_map = { + AbPC: lambda _: "AbPC", + Complexity: lambda _: "Complexity", + Sensitivity: lambda _: "Sensitivity", + Composite: lambda metric: f"Composite({','.join([str(metric.__class__.__name__) for metric in metric.metrics])})", + } + metric_type = metric.__class__ + if metric_type not in formatter_map: + raise Exception(f"Metric: {metric_type} is not supported.") + + return formatter_map[metric_type](metric) + + +def get_composite_agg_func(params): + def func(*args): + device = args[0].device + total_val = 0 + for arg, param in zip(args, params): + total_val = total_val + arg.to(device) * param + return total_val + + return func + + +def get_explainers( + explainer_types: Sequence[Type[pnp_exp.Explainer]], + model: nn.Module, + modality: TimeSeriesModality, +): + default_kwargs = { + "feature_mask_fn": modality.get_default_feature_mask_fn(), + "baseline_fn": modality.get_default_baseline_fn(), + } + explainers = [] + for explainer_type in explainer_types: + explainer = explainer_type(model=model) + for k, v in default_kwargs.items(): + if hasattr(explainer, k): + explainer = explainer.set_kwargs(**{k: v}) + explainers.append(explainer) + + return explainers + + +def get_metrics( + metric_types: Sequence[Type[Metric]], + model: nn.Module, + modality: TimeSeriesModality, + agg_dim: int, +): + default_kwargs = { + "baseline_fn": 
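`Composite` above collapses several metrics into a single objective through `get_composite_agg_func`, i.e. a weighted sum such as 0.8·AbPC − 0.2·Complexity. The self-contained sketch below reproduces that aggregation with made-up metric values; the weights mirror the ones used later in this script.

```python
# Self-contained sketch of the weighted aggregation built by get_composite_agg_func.
import torch


def weighted_sum(*metric_values, weights=(0.8, -0.2)):
    total = torch.zeros_like(metric_values[0])
    for value, weight in zip(metric_values, weights):
        total = total + value.to(total.device) * weight
    return total


abpc = torch.tensor([0.9, 0.7])          # higher is better
complexity = torch.tensor([0.5, 0.4])    # lower is better, hence the negative weight
score = weighted_sum(abpc, complexity)   # tensor([0.6200, 0.4800])
```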
modality.get_default_baseline_fn(), + "feature_mask_fn": modality.get_default_feature_mask_fn(), + "channel_dim": modality.channel_dim, + "mask_agg_dim": agg_dim, + } + + metrics = [] + for metric_type in metric_types: + metric = metric_type(model=model) + for k, v in default_kwargs.items(): + if hasattr(metric, k): + metric = metric.set_kwargs(**{k: v}) + metrics.append(metric) + + return metrics + + +def collect_exp_metrics( + model: nn.Module, modality: TimeSeriesModality, agg_dim: int +) -> List[Metric]: + metrics = [] + metrics = get_metrics([AbPC, Complexity, Sensitivity], model, modality, agg_dim) + metrics += [ + Composite( + [metrics[0], metrics[1]], get_composite_agg_func([0.8, -0.2]) + ), # AbPC, Sensitivity + Composite( + [metrics[0], metrics[2]], get_composite_agg_func([0.8, -0.2]) + ), # AbPC, Sensitivity + Composite( + [metrics[0], metrics[1], metrics[2]], + get_composite_agg_func([0.6, -0.2, -0.2]), + ), # AbPC, Complexity, Sensitivity + ] + return metrics + + +def log_data( + filename: str, exp_name: str, metric: str, explainer: str, best_value: float +): + os.makedirs(os.path.dirname(filename), exist_ok=True) + with open(filename, "a+") as file: + file.seek(0, 0) + if len(file.read(1)) == 0: + file.write("experiment,metric,explainer,value\n") + file.seek(2, 0) + file.write(f"{exp_name},{metric},{explainer},{best_value:.4f}\n") + + +def analyze_pnp( + model: nn.Module, + dataset: Dataset, + modality: TimeSeriesModality, + explainer_map: dict, + metrics: Sequence[Metric], + out_filename: str, + device: torch.device, +): + all_data_ids = list(range(len(dataset))) + loader = DataLoader(dataset, batch_size=1) + + explainers = get_explainers(explainer_map.keys(), model, modality) + + expr = Experiment( + model=model, + data=loader, + modality=modality, + explainers=explainers, + postprocessors=modality.get_default_postprocessors(), + metrics=metrics, + input_extractor=lambda batch: batch[0].to(device), + label_extractor=lambda batch: batch[-1].to(device), + target_extractor=lambda outputs: outputs.argmax(-1).to(device), + target_labels=False, + ) + + expr.predict_batch(all_data_ids) + + optimization_directions = { + Complexity: "minimize", + AbPC: "maximize", + Composite: "maximize", + Sensitivity: "minimize", + } + + for metric, metric_id in zip(*expr.manager.get_metrics()): + for explainer, explainer_id in zip(*expr.manager.get_explainers()): + best_value = 0 + n_processed = 0 + + for idx, data_id in enumerate(all_data_ids): + try: + optimized = expr.optimize( + data_ids=[data_id], + explainer_id=explainer_id, + metric_id=metric_id, + direction=optimization_directions[metric.__class__], + sampler="tpe", # Literal['tpe','random'] + n_trials=20, + seed=42, # seed for sampler: by default, None + ) + cur_best_value = optimized.study.best_trial.value + best_value += cur_best_value + print( + f"[{idx + 1}] {str(metric)} | {explainer.__class__.__name__} = {cur_best_value}" + ) # get the optimized value + + del optimized + n_processed += 1 + except Exception as e: + print( + f"[Error] Metric: {metric.__class__.__name__}; Explainer: {explainer.__class__.__name__} with error:\n{e}" + ) + gc.collect() + torch.cuda.empty_cache() + + best_value /= n_processed + print(f"Metric: {str(metric)}; Explainer: {explainer.__class__.__name__};") + print("Best/value:", best_value) # get the optimized value + + log_data( + out_filename, + "PnP XAI", + metric_to_str(metric), + explainer_map[explainer.__class__], + best_value, + ) + + torch.cuda.empty_cache() + gc.collect() + + +def analyze_captum( + 
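`log_data` above appends one row per (experiment, metric, explainer) combination to a CSV with the header `experiment,metric,explainer,value`. A small sketch of summarizing that file afterwards is shown below; the path matches the script's `--out_file` default, and the pivot is just one possible view.

```python
# Summarize the CSV produced by log_data (path is the script's --out_file default).
import pandas as pd

df = pd.read_csv("results/hpo_analysis_ecg/explanations_summary.csv")
summary = df.pivot_table(index="explainer", columns="metric",
                         values="value", aggfunc="mean")
print(summary.round(4))
```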
model: nn.Module, + dataset: Dataset, + explainer_map: dict, + metrics: Sequence[Metric], + out_filename: str, + device: torch.device, +): + explainer_wrappers = [ + CaptumExplainerWrapper(exp_type, model) for exp_type in explainer_map.keys() + ] + inputs, target = [tensor.to(device) for tensor in dataset.tensors] + + for metric in metrics: + for wrapper in explainer_wrappers: + try: + attributions = wrapper.attribute(inputs, target) + metric = metric.set_explainer(wrapper) + evals = metric.evaluate(inputs, target, attributions) + evals = (sum(evals) / len(evals)).item() + + print( + f"Metric: {str(metric)}; Explainer: {wrapper.__class__.__name__};" + ) + print("Best/value:", evals) + + torch.cuda.empty_cache() + log_data( + out_filename, + "Captum", + metric_to_str(metric), + explainer_map[wrapper.explainer.__class__], + evals, + ) + except Exception as e: + print( + f"[Error] Metric: {metric.__class__.__name__}; Explainer: {wrapper.__class__.__name__}. {e}" + ) + raise e + + +def analyze(args: Namespace): + out_filename = args.out_file + dataset = get_ecg_dataset_from_hf(repo_id="enver1323/ucr-twoleadecg") + + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + model = MODEL_GENERATORS[args.model]().to(device) + + seq_dim = -1 + agg_dim = -2 + modality = TimeSeriesModality(seq_dim) + metrics = collect_exp_metrics(model, modality, agg_dim) + + analyze_pnp( + model, dataset, modality, PNPXAI_EXPLAINER_TYPES, metrics, out_filename, device + ) + analyze_captum( + model, dataset, CAPTUM_EXPLAINER_TYPES, metrics, out_filename, device + ) + + +def main(): + """Main execution function.""" + # arguments + parser = ArgumentParser( + description="Compare explanations by PnPXAI and Captum on ECG dataset" + ) + parser.add_argument( + "--model", + type=str, + choices=MODEL_GENERATORS.keys(), + required=True, + ) + parser.add_argument( + "--out_file", + type=str, + default="results/hpo_analysis_ecg/explanations_summary.csv", + help="Filepath for results' collection", + ) + + args = parser.parse_args() + + analyze(args) + print("\nEvaluation finished.") + + +if __name__ == "__main__": + main() diff --git a/experiments/scripts/analyze_imagenet_hpo.py b/experiments/scripts/analyze_imagenet_hpo.py new file mode 100644 index 0000000..8585e00 --- /dev/null +++ b/experiments/scripts/analyze_imagenet_hpo.py @@ -0,0 +1,343 @@ +from collections import defaultdict +import argparse +import os + +from torchvision.transforms import InterpolationMode, Resize +import captum +import torch +import numpy as np +import matplotlib.pyplot as plt +import matplotlib.gridspec as gridspec + +from pnpxai.core.modality.modality import ImageModality +from pnpxai.explainers import LRPUniformEpsilon, IntegratedGradients, KernelShap +from pnpxai.explainers.utils.feature_masks import FeatureMaskFunction +from pnpxai.explainers.utils.function_selectors import FunctionSelector +from pnpxai.explainers.utils.postprocess import PostProcessor +from pnpxai.evaluator.metrics import MoRF, LeRF, AbPC +from pnpxai.evaluator.optimizer import Objective, optimize + +from experiments.utils import ( + set_seed, + get_torchvision_model, + get_imagenet_sample_from_hf, + denormalize_image, + save_pickle_data, + load_pickle_data, +) + + +# plot settings +plt.rcParams['font.family'] = 'Times New Roman' + + +# configs +TARGET_EXPLAINERS = { + 'lrpe': { + 'pnpxai': LRPUniformEpsilon, + 'captum': captum.attr.LRP, + 'dname': r'LRP-Uniform$\varepsilon$', + }, + 'ig': { + 'pnpxai': IntegratedGradients, + 'captum': 
captum.attr.IntegratedGradients, + 'dname': 'Integrated Gradients' + }, + 'ks': { + 'pnpxai': KernelShap, + 'captum': captum.attr.KernelShap, + 'dname': 'KernelSHAP', + } +} + +TARGET_METRICS = { + 'morf': { + 'cls': MoRF, + 'dname': r'MoRF$\downarrow$', + }, + 'lerf': { + 'cls': LeRF, + 'dname': r'LeRF$\uparrow$', + }, + 'abpc': { + 'cls': AbPC, + 'dname': r'AbPC$\uparrow$', + }, +} + + +# a custom feature mask function +class Checkerboard(FeatureMaskFunction): + def __init__( + self, + size=[20, 20], + ): + assert len(size) == 2 + self.size = size + self._n_checkers = size[0] * size[1] + + def __call__(self, inputs: torch.Tensor): + assert inputs.dim() == 4 + + bsz, c, h, w = inputs.size() + # print(input_size) + + resize = Resize([h, w], interpolation=InterpolationMode.NEAREST) + + patch_masks = [] + for i in range(self._n_checkers): + mask = np.zeros(self._n_checkers) + mask[i] = i + mask = resize( + torch.Tensor(mask).reshape(- + 1,self.size[0], self.size[1])).unsqueeze(1) + patch_masks.append(mask.numpy()) + return torch.from_numpy(sum(patch_masks)).squeeze(1).repeat( + bsz, 1, 1).long().to(inputs.device) + + +def analyze(args, fp_data): + """ + Performs HPO and saves raw data for a single data instance. + """ + + # Setup + set_seed(args.seed) + device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') + + model, transform = get_torchvision_model('resnet18') + model = model.to(device) + model.eval() + + img, label = get_imagenet_sample_from_hf( + transform, + hf_repo_id="geonhyeongkim/imagenet-samples-for-pnpxai-experiments", + indices=[args.data_id], + ) + imgs = img.unsqueeze(0).to(device) + targets = torch.tensor(args.data_id).unsqueeze(0).to(device) + + # Get explanations and evaluations + fm_selector = FunctionSelector( + {'checkerboard': Checkerboard}) + modality = ImageModality( # set data modality + channel_dim=1, + feature_mask_fn_selector=fm_selector, + ) + + plot_data = { # container for collecting plot data + 'img': denormalize_image(img, mean=transform.mean, std=transform.std), + 'label': label, + 'heatmaps': defaultdict(dict), + 'values': defaultdict(lambda: defaultdict(dict)), + } + for explainer_key in TARGET_EXPLAINERS: + # --- pnpxai ----------------------------------------------------------- + + # Create explainer + pnp_explainer = TARGET_EXPLAINERS[explainer_key]['pnpxai'](model) + metric = TARGET_METRICS['abpc']['cls']( + model=model, explainer=pnp_explainer, + ) # metric to be used as objective: AbPC + + # Get default postprocessor to initialize optimization: (SumPos, MinMax) + default_pp = modality.get_default_postprocessors()[0] + + # Optimize explainer + obj = Objective( + explainer=pnp_explainer, + postprocessor=default_pp, + metric=metric, + modality=modality, + inputs=imgs, + targets=targets, + ) + study = optimize( + obj, + direction='maximize', + n_trials=args.n_trials, + sampler='tpe', + seed=args.seed, + ) + opt_explainer, opt_pp = study.best_trial.user_attrs.values() + + # Get explanation of the optimized explainer + if explainer_key == 'ks': + setattr(opt_explainer, 'n_samples', 300) + opt_attrs = opt_explainer.attribute(imgs, targets) + opt_attrs_pp = opt_pp(opt_attrs) + plot_data['heatmaps']['pnpxai'][explainer_key] = ( + opt_attrs_pp.squeeze().detach().cpu().numpy()) + + + # --- captum ----------------------------------------------------------- + + # Create explainer + captum_kwargs = {} + if explainer_key == 'ks': + # Use same feature mask with pnpxai + captum_kwargs['feature_mask'] = Checkerboard()(imgs) + captum_kwargs['n_samples'] 
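The custom `Checkerboard` feature mask above assigns every pixel the integer id of its grid cell, so that KernelSHAP perturbs whole patches rather than single pixels. The sketch below builds a roughly equivalent patch-id mask with `F.interpolate`; it is an illustration, not the class's exact implementation.

```python
# Roughly equivalent sketch of a patch-grid feature mask (not the exact Checkerboard code).
import torch
import torch.nn.functional as F


def patch_grid_mask(inputs: torch.Tensor, grid=(20, 20)) -> torch.Tensor:
    bsz, _, h, w = inputs.shape
    ids = torch.arange(grid[0] * grid[1], dtype=torch.float32).reshape(1, 1, *grid)
    ids = F.interpolate(ids, size=(h, w), mode="nearest")    # each cell keeps its integer id
    return ids.long().squeeze(1).repeat(bsz, 1, 1)           # [bsz, h, w] patch ids


mask = patch_grid_mask(torch.randn(2, 3, 224, 224))          # values in 0..399
```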
= 300 + captum_explainer = TARGET_EXPLAINERS[explainer_key]['captum'](model) + + # Get explanation of captum explainer + captum_attrs = captum_explainer.attribute( + inputs=imgs, target=targets, **captum_kwargs) + captum_attrs_pp = default_pp(captum_attrs) + plot_data['heatmaps']['captum'][explainer_key] = ( + captum_attrs_pp.squeeze().detach().cpu().numpy()) + + + # --- evaluation ------------------------------------------------------- + + for metric_key in TARGET_METRICS: + # Create metric and evaluate + pnp_value = TARGET_METRICS[metric_key]['cls']( + model=model, explainer=pnp_explainer).evaluate( + inputs=imgs, targets=targets, attributions=opt_attrs_pp).item() + plot_data['values']['pnpxai'][explainer_key][metric_key] = pnp_value + captum_value = TARGET_METRICS[metric_key]['cls']( + model=model, explainer=captum_explainer).evaluate( + inputs=imgs, targets=targets, attributions=captum_attrs_pp).item() + plot_data['values']['captum'][explainer_key][metric_key] = captum_value + + # Save the data + os.makedirs(os.path.dirname(fp_data), exist_ok=True) + save_pickle_data(data=plot_data, filepath=fp_data) + + +DEFAULT_SELECTED_SAMPLE_INDICES = [ + 75, 358, 367, 852, +] + + +def visualize(args, fp_data, fp_fig): + """ + Loads saved data for a single instance and generates visualization. + """ + # Load the data + plot_data = load_pickle_data(fp_data) + + # Set layout + edge_size = 2.5 + fig = plt.figure(figsize=(edge_size*3, edge_size*3)) + outer = gridspec.GridSpec( + 2, 1, + height_ratios=[1.0, 2.4], + ) + row1 = gridspec.GridSpecFromSubplotSpec( + 1, 3, + subplot_spec=outer[0], + ) + ax_img = fig.add_subplot(row1[0, 0]) + ax_bar = fig.add_subplot(row1[0, 1:3]) + row23 = gridspec.GridSpecFromSubplotSpec( + 2, 3, + subplot_spec=outer[1], + hspace=0.04, + wspace=0.04 + ) + axes_heatmaps = [[ + fig.add_subplot(row23[i, j]) for j in range(3)] for i in range(2)] + + # Plot sample img + ax_img.imshow(plot_data['img']) + ax_img.set_xticks([]); ax_img.set_yticks([]) + ax_img.set_aspect('equal') + ax_img.set_title(plot_data['label'].replace('_', ' ').title(), fontsize=15) + + # Plot differences in evaluations between pnpxai and captum + bar_data = defaultdict(list) + for explainer_key in TARGET_EXPLAINERS: + for metric_key in TARGET_METRICS: + diff = ( + plot_data['values']['pnpxai'][explainer_key][metric_key] + - plot_data['values']['captum'][explainer_key][metric_key] + ) + bar_data[explainer_key].append(diff) + + x = np.arange(len(TARGET_METRICS)) + width = 0.25 + for i, (explainer_key, evals) in enumerate(bar_data.items()): + ax_bar.bar( + x + i*width, evals, width, + label=TARGET_EXPLAINERS[explainer_key]['dname']) + + ax_bar.set_ylim(-.2, .8) + ax_bar.set_title('PnPXAI - Captum', fontsize=15) + ax_bar.set_xticks( + x + width, + [TARGET_METRICS[nm]['dname'] for nm in TARGET_METRICS], + fontsize=10, + ) + ax_bar.grid(axis='y') + ax_bar.margins(x=0.01) + + # Plot heatmaps + for c, explainer_key in enumerate(TARGET_EXPLAINERS): + alpha = 1. 
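+        # KernelSHAP attributions are piecewise-constant over the checkerboard
+        # regions, so the input image is drawn underneath and the heatmap is
+        # overlaid with reduced alpha; pixel-level explainers are shown as plain
+        # heatmaps.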
+ if explainer_key == 'ks': + axes_heatmaps[0][c].imshow(plot_data['img']) + axes_heatmaps[1][c].imshow(plot_data['img']) + alpha *= .75 + + axes_heatmaps[0][c].imshow( + plot_data['heatmaps']['captum'][explainer_key], + cmap='Reds', alpha=alpha, + ) + axes_heatmaps[1][c].imshow( + plot_data['heatmaps']['pnpxai'][explainer_key], + cmap='Reds', alpha=alpha, + ) + + axes_heatmaps[0][c].set_title( + TARGET_EXPLAINERS[explainer_key]['dname'], fontsize=15) + if c == 0: + axes_heatmaps[0][c].set_ylabel('Captum', fontsize=15) + axes_heatmaps[1][c].set_ylabel( + 'PnPXAI (Ours)', fontsize=15, fontweight='bold') + + axes_heatmaps[0][c].set_xticks([]); axes_heatmaps[0][c].set_yticks([]) + axes_heatmaps[1][c].set_xticks([]); axes_heatmaps[1][c].set_yticks([]) + axes_heatmaps[0][c].set_aspect('equal') + axes_heatmaps[1][c].set_aspect('equal') + + # Save figure + fig.legend( + loc='upper center', ncols=3, + bbox_to_anchor=(.5, 1.0), frameon=False, fontsize=12) + os.makedirs(os.path.dirname(fp_fig), exist_ok=True) + fig.savefig(fp_fig, bbox_inches='tight', pad_inches=0.02, dpi=300) + print(f"Visualization saved for Data ID {args.data_id}: {fp_fig}") + + +def main(): + """Main execution function""" + # Arguments + parser = argparse.ArgumentParser() + parser.add_argument('--data_id', type=int, required=True, help='The specific ID of the data instance to process.') + parser.add_argument('--save_dir', type=str, default='results/analyze_imagenet_hpo/') + parser.add_argument('--seed', type=int, default=42) + parser.add_argument('--n_trials', default=100, type=int) + parser.add_argument('--analyze', action='store_true') + parser.add_argument('--visualize', action='store_true') + args = parser.parse_args() + + # Set result filepaths + os.makedirs(args.save_dir, exist_ok=True) + fp_data = os.path.join(args.save_dir, 'raw', f'{args.data_id}.pkl') + fp_fig = os.path.join(args.save_dir, 'figures', f'{args.data_id}.pdf') + + # Run experiment + if args.analyze: + analyze(args, fp_data) + if args.visualize: + if not os.path.exists(fp_data): + raise Exception( + f'{fp_data} not found. 
Try again with the following flag: --analyze') + visualize(args, fp_data, fp_fig) + + +if __name__ == '__main__': + main() + diff --git a/experiments/scripts/analyze_imagenet_hpo_impact.py b/experiments/scripts/analyze_imagenet_hpo_impact.py new file mode 100644 index 0000000..8071cb4 --- /dev/null +++ b/experiments/scripts/analyze_imagenet_hpo_impact.py @@ -0,0 +1,1348 @@ +import os +import argparse +import itertools +import random +from collections import defaultdict +import matplotlib +import matplotlib.pyplot as plt +import matplotlib.gridspec as gridspec +import matplotlib.ticker as mticker +import numpy as np +from tqdm import tqdm + +import torch +import pandas as pd +from torch.utils.data import DataLoader + +from pnpxai import XaiRecommender, Experiment, AutoExplanationForImageClassification +from pnpxai.evaluator.metrics import AbPC, Complexity + +from experiments.utils import ( + set_seed, set_params, + patch_lrp_explainer, + get_torchvision_model, + get_imagenet_dataset, get_imagenet_val_dataset, + denormalize_image, + save_pickle_data, load_pickle_data, +) + +import pdb + + +# --- Matplotlib Configuration --- +matplotlib.rcParams['font.family'] = 'serif' +matplotlib.rcParams['font.serif'] = ['Times New Roman'] + matplotlib.rcParams['font.serif'] +matplotlib.rcParams['mathtext.fontset'] = 'stix' +matplotlib.rcParams['axes.unicode_minus'] = False +matplotlib.rcParams['pdf.fonttype'] = 42 + +TORCHVISION_MODEL_CHOICES = [ + 'resnet18', + 'vit_b_16', + # ... +] + +# --- Hyperparameter Grid Definitions --- + +POSTPROCESS_PARAM_KEYS = {'pooling_method', 'normalization_method'} + +# Define common hyperparameters +COMMON_HYPERPARAMETERS = { + 'pooling_method': ['sumpos', 'sumabs', 'l1norm', 'maxnorm', 'l2norm', 'l2normsq', 'possum', 'posmaxnorm', 'posl2norm', 'posl2normsq'], + 'normalization_method': ['identity', 'minmax'], +} + +# Define explainer-specific hyperparameters +EXPL_HYPERPARAMETERS = { + 'guided_grad_cam': { + 'interpolate_mode': ['nearest', 'area'], + }, + 'integrated_gradients': { + 'n_steps': [10, 20, 30, 40, 50, 60, 70, 80, 90, 100], + 'baseline_fn': ['zeros', 'mean', 'invert'], + }, + 'kernel_shap': { + 'n_steps': [10, 20, 30, 40, 50], + 'baseline_fn': ['zeros', 'mean', 'invert'], + 'feature_mask_fn': ['felzenszwalb', 'quickshift', 'slic'], + }, + 'lrp_epsilon_alpha2_beta1': { + 'epsilon': [1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 1.0], + }, + 'lrp_epsilon_gamma_box': { + 'epsilon': [1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 1.0], + 'gamma': [1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 1.0], + }, + 'lrp_epsilon_plus': { + 'epsilon': [1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 1.0], + }, + 'lrp_uniform_epsilon': { + 'epsilon': [1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 1.0], + }, + 'lime': { + 'n_steps': [10, 20, 30, 40, 50], + 'baseline_fn': ['zeros', 'mean', 'invert'], + 'feature_mask_fn': ['felzenszwalb', 'quickshift', 'slic'], + }, + 'smooth_grad': { + 'noise_level': np.round(np.arange(0.10, 1.05, 0.10), 2).tolist(), + 'n_iter': np.arange(10, 101, 10).tolist(), + }, + 'var_grad': { + 'noise_level': np.round(np.arange(0.10, 1.05, 0.10), 2).tolist(), + 'n_iter': np.arange(10, 101, 10).tolist(), + }, +} +# Special 2x2 grid for LRPEpsilonGammaBox qualitative examples, +# as the main grid lacks the precise default and optimized values. +QUALITATIVE_HYPERPARAMETERS = { + 'lrp_epsilon_gamma_box': { + 'epsilon': [1e-6, 0.022684753958641276], + 'gamma': [0.2413579229130399, 0.25], + }, +} + +# Define plotting configurations for each explainer +PLOTTING_CONFIGS = { + # 1. 
Impact of Pooling Methods + 'grad_cam': { + 'column_hp': 'pooling_method', + 'row_hps': ['normalization_method'], + 'fixed_hps': [], # No other HPs to fix + }, + 'guided_grad_cam': { + 'column_hp': 'pooling_method', + 'row_hps': ['normalization_method', 'interpolate_mode'], + 'fixed_hps': [], + }, + 'gradient': { + 'column_hp': 'pooling_method', + 'row_hps': ['normalization_method'], + 'fixed_hps': [], + }, + 'gradient_x_input': { + 'column_hp': 'pooling_method', + 'row_hps': ['normalization_method'], + 'fixed_hps': [], + }, + + # 2. Impact of Noise Level / Epsilon + 'lrp_epsilon_alpha2_beta1': { + 'column_hp': 'pooling_method', + 'row_hps': ['epsilon'], + 'fixed_hps': ['normalization_method'], + }, + 'lrp_epsilon_plus': { + 'column_hp': 'pooling_method', + 'row_hps': ['epsilon'], + 'fixed_hps': ['normalization_method'], + }, + 'lrp_uniform_epsilon': { + 'column_hp': 'pooling_method', + 'row_hps': ['epsilon'], + 'fixed_hps': ['normalization_method'], + }, + 'lrp_epsilon_gamma_box': { + 'column_hp': 'gamma', + 'row_hps': ['epsilon'], + 'fixed_hps': ['pooling_method', 'normalization_method'], # These are fixed at best_params values + }, + 'smooth_grad': { + 'column_hp': 'n_iter', + 'row_hps': ['noise_level'], + 'fixed_hps': ['pooling_method', 'normalization_method'], + }, + 'var_grad': { + 'column_hp': 'n_iter', + 'row_hps': ['noise_level'], + 'fixed_hps': ['pooling_method', 'normalization_method'], + }, + + # 3. Impact of Baseline Functions and Feature Mask Functions + 'kernel_shap': { + 'column_hp': 'baseline_fn', + 'row_hps': ['feature_mask_fn'], + 'fixed_hps': ['n_steps', 'pooling_method', 'normalization_method'], + }, + 'lime': { + 'column_hp': 'baseline_fn', + 'row_hps': ['feature_mask_fn'], + 'fixed_hps': ['n_steps', 'pooling_method', 'normalization_method'], + }, + + # 4. Impact of Number of Steps + 'integrated_gradients': { + 'column_hp': 'n_steps', + 'row_hps': ['baseline_fn'], + 'fixed_hps': ['pooling_method', 'normalization_method'], + }, +} + +DEFAULT_PLOT_CONFIG = { + 'column_hp': 'pooling_method', + 'row_hps': ['normalization_method'], # Default to vary normalization in rows if no other HPs are specified for rows + 'fixed_hps': [], +} + +# Metrics to evaluate in the grid search +TARGET_METRIC_KEYS = ['ab_pc', 'complexity'] + + +def _run_grid_for_explainer( + explainer_key, + explainer_class, + postprocessor_type, + model, + hyperparameter_source, + best_params_for_explainer, + expr, + data_id_iterator, + batch_size, + dataset, + target_metric_ids, +): + """ + Helper function to run the actual grid evaluation loop for a given set of hyperparameters. + This function is called by `run_grid_search_evaluation`. + """ + print(f"Running grid search for {explainer_key} using HP source: {hyperparameter_source.get(explainer_key, {}).keys()}") + + # --- Setup Grid Parameters --- + plot_config = PLOTTING_CONFIGS.get(explainer_key, DEFAULT_PLOT_CONFIG) + column_hp_key = plot_config['column_hp'] + row_hp_keys = plot_config['row_hps'] + fixed_hps = plot_config['fixed_hps'] + + all_hps_for_explainer = { + **hyperparameter_source.get(explainer_key, {}), + **COMMON_HYPERPARAMETERS + } + + column_values = all_hps_for_explainer.get(column_hp_key, []) + if not column_values: + print(f"Warning: No column HP values found for '{column_hp_key}'. Skipping run.") + return {}, {}, {} + + row_param_values_lists = [] + for rhp_key in row_hp_keys: + values = all_hps_for_explainer.get(rhp_key, []) + if not values: + print(f"Warning: No row HP values found for '{rhp_key}'. 
Skipping run.") + return {}, {}, {} + row_param_values_lists.append(values) + + row_combinations = list(itertools.product(*row_param_values_lists)) + if not row_combinations and row_hp_keys: # Ensure run if no row HPs (row_combinations=[()]) + print(f"Warning: No valid row combinations found. Skipping run.") + return {}, {}, {} + elif not row_combinations and not row_hp_keys: + row_combinations = [()] # Allow run if no row HPs are defined + + # --- Initialize Result Dictionaries --- + all_images_heatmaps = {} + all_images_eval_scores = {} + predicted_labels = {} # Labels will be populated by the first batch loop + + # --- Run Nested Loops --- + num_batches = (len(data_id_iterator) + batch_size - 1) // batch_size + + for batch_idx in tqdm(range(num_batches), desc=f"Computing batches for {explainer_key}", leave=False): + start_idx = batch_idx * batch_size + end_idx = min((batch_idx + 1) * batch_size, len(data_id_iterator)) + cur_batch_data_ids = list(data_id_iterator[start_idx:end_idx]) + + if not cur_batch_data_ids: + continue + + # Prediction and label generation (only needs to run once per batch) + expr.predict_batch(data_ids=cur_batch_data_ids) + target_idx = expr.get_targets_flattened(data_ids=cur_batch_data_ids) + for i, data_id in enumerate(cur_batch_data_ids): + predicted_labels[data_id] = dataset.class_index[str(target_idx[i].item())][-1] + + # --- Nested loops for HP combinations --- + for row_idx, row_combo_values in enumerate(tqdm(row_combinations, desc="Row HP Combinations", leave=False)): + current_row_params_dict = dict(zip(row_hp_keys, row_combo_values)) + + for col_idx, current_col_value in enumerate(tqdm(column_values, desc="Column HP Values", leave=False)): + params = best_params_for_explainer.copy() + params.update(current_row_params_dict) + params[column_hp_key] = current_col_value + + for fixed_hp_key in fixed_hps: + if fixed_hp_key not in params: + if fixed_hp_key in all_hps_for_explainer and all_hps_for_explainer[fixed_hp_key]: + params[fixed_hp_key] = all_hps_for_explainer[fixed_hp_key][0] + else: + print(f"Warning: Fixed HP '{fixed_hp_key}' not found.") + + explainer_kwargs, post_kwargs = set_params(params, expr.modality) + + current_explainer = explainer_class(model=model).set_kwargs(**explainer_kwargs) + current_explainer = patch_lrp_explainer(current_explainer) # Apply LRP patch + current_postprocessor = postprocessor_type.from_name(**post_kwargs) + + current_explainer_id = expr.manager.add_explainer(current_explainer) + current_postprocessor_id = expr.manager.add_postprocessor(current_postprocessor) + + with torch.no_grad(): + expr.explain_batch( + data_ids=cur_batch_data_ids, + explainer_id=current_explainer_id + ) + opt_attrs_pp = expr.postprocess_batch( + data_ids=cur_batch_data_ids, + explainer_id=current_explainer_id, + postprocessor_id=current_postprocessor_id + ) + + if torch.isnan(opt_attrs_pp).any(): + print(f" WARNING: NaN detected for params={params}. 
Clamping to zero.") + opt_attrs_pp = torch.zeros_like(opt_attrs_pp) + + for i, data_id in enumerate(cur_batch_data_ids): + if data_id not in all_images_heatmaps: + all_images_heatmaps[data_id] = {} + all_images_heatmaps[data_id][(row_idx, col_idx)] = opt_attrs_pp[i].cpu().numpy() + + for target_metric_key, target_metric_id in target_metric_ids.items(): + opt_evals = expr.evaluate_batch( + data_ids=cur_batch_data_ids, + explainer_id=current_explainer_id, + postprocessor_id=current_postprocessor_id, + metric_id=target_metric_id + ) + for i, data_id in enumerate(cur_batch_data_ids): + if data_id not in all_images_eval_scores: + all_images_eval_scores[data_id] = {} + all_images_eval_scores[data_id][(row_idx, col_idx, target_metric_key)] = opt_evals[i] + + return all_images_heatmaps, all_images_eval_scores, predicted_labels + + +def run_grid_search_evaluation(args): + """ + Runs the HPO grid search evaluation. + This function now handles the special 2x2 grid case for lrp_epsilon_gamma_box + by calling a helper function `_run_grid_for_explainer`. + """ + # Setup + cwd = os.getcwd() + use_gpu = torch.cuda.is_available() and not args.disable_gpu + device = torch.device('cuda' if use_gpu else 'cpu') + set_seed(0) + + # Prepare model + model, transform = get_torchvision_model(args.model) + model.to(device) + model.eval() + + # Prepare data + dataset = get_imagenet_val_dataset( + transform, args.data_dir, + ) + + data_id_iterator = range(args.data_from, args.data_to) + batch_size = args.batch_size + dataloader = DataLoader( + dataset, + batch_size=batch_size, + num_workers=0, + pin_memory=use_gpu, + shuffle=False, + ) + + if not dataloader: + raise ValueError("Dataloader is empty. Check dataset path or size.") + + # --- AutoExplanation --- + expr = AutoExplanationForImageClassification( + model=model, + data=dataloader, # Dataloader is used for init, loop uses data_id_iterator + input_extractor=lambda batch: batch[0].to(device), + label_extractor=lambda batch: batch[1].to(device), + target_extractor=lambda outputs: outputs.argmax(-1).to(device), + target_labels=False, + ) + + # Map explainer string keys to their base class names + KEY_TO_CLASS_NAME_MAP = { + 'grad_cam': 'GradCAM', + 'guided_grad_cam': 'GuidedGradCam', + 'gradient': 'Gradient', + 'gradient_x_input': 'GradientXInput', + 'integrated_gradients': 'IntegratedGradients', + 'kernel_shap': 'KernelShap', + 'lrp_epsilon_alpha2_beta1': 'LRPEpsilonAlpha2Beta1', + 'lrp_epsilon_gamma_box': 'LRPEpsilonGammaBox', + 'lrp_epsilon_plus': 'LRPEpsilonPlus', + 'lrp_uniform_epsilon': 'LRPUniformEpsilon', + 'lime': 'Lime', + 'smooth_grad': 'SmoothGrad', + 'var_grad': 'VarGrad', + } + + # Map class names to the actual base instances initialized by AutoExplanation + CLASS_NAME_TO_CLASS_TYPE = {exp.__class__.__name__: exp.__class__ for exp in expr.manager.explainers} + + # Get the base postprocessor instance + base_postprocessor_instance = expr.manager.get_postprocessor_by_id(0) + PostprocessorType = type(base_postprocessor_instance) + + # Map target metric keys (like 'ab_pc') to their metric IDs + METRIC_NAME_TO_ID_MAP = {type(m).__name__.lower(): i for i, m in enumerate(expr.manager.metrics)} + target_metric_ids = { + 'ab_pc': METRIC_NAME_TO_ID_MAP.get('abpc'), + 'complexity': METRIC_NAME_TO_ID_MAP.get('complexity') + } + if None in target_metric_ids.values(): + raise RuntimeError(f"Could not find all target metrics. 
Found: {METRIC_NAME_TO_ID_MAP}") + + # Load optimized parameters + best_params = defaultdict(dict) + for explainer_key in args.eval_explainer: + params_path = os.path.join(cwd, f'data/ImageNet/optimized_params/{args.model}/{explainer_key}.pkl') + if not os.path.exists(params_path): + print(f"Warning: Optimized params file not found at {params_path}. Using empty dict.") + else: + best_params[explainer_key] = load_pickle_data(params_path)['composite'] + + # --- Main Loop --- + for explainer_key in args.eval_explainer: + print(f"\n--- Processing Explainer: {explainer_key} ---") + + explainer_class_name = KEY_TO_CLASS_NAME_MAP.get(explainer_key) + ExplainerClass = CLASS_NAME_TO_CLASS_TYPE.get(explainer_class_name) + if not ExplainerClass: + print(f"Error: Could not find class type for {explainer_class_name}. Skipping.") + continue + + # Run the main evaluation grid + main_heatmaps, main_scores, main_labels = _run_grid_for_explainer( + explainer_key=explainer_key, + explainer_class=ExplainerClass, + postprocessor_type=PostprocessorType, + model=model, + hyperparameter_source=EXPL_HYPERPARAMETERS, + best_params_for_explainer=best_params[explainer_key], + expr=expr, + data_id_iterator=data_id_iterator, + batch_size=batch_size, + dataset=dataset, + target_metric_ids=target_metric_ids, + ) + + # Save main results + if main_heatmaps: + savedir = os.path.join(cwd, f'results/hpo_impact_imagenet/raw/{args.model}') + save_fname = os.path.join(savedir, f'{explainer_key}.pkl') + os.makedirs(savedir, exist_ok=True) + save_pickle_data( + data={'all_images_heatmaps': main_heatmaps, 'all_images_eval_scores': main_scores, 'predicted_labels': main_labels}, + filepath=save_fname, + ) + print(f"Saved main analysis results for {explainer_key} to {save_fname}") + + # Run the special 2x2 grid for LRPEpsilonGammaBox (if applicable) + if explainer_key == 'lrp_epsilon_gamma_box': + print(f"\n--- Processing Explainer: {explainer_key} (2x2 Qualitative Grid) ---") + + q_heatmaps, q_scores, q_labels = _run_grid_for_explainer( + explainer_key=explainer_key, + explainer_class=ExplainerClass, + postprocessor_type=PostprocessorType, + model=model, + hyperparameter_source=QUALITATIVE_HYPERPARAMETERS, # Use the 2x2 grid + best_params_for_explainer=best_params[explainer_key], + expr=expr, + data_id_iterator=data_id_iterator, + batch_size=batch_size, + dataset=dataset, + target_metric_ids=target_metric_ids, + ) + + # Save special 2x2 results for LRPEpsilonGammaBox + if q_heatmaps: + savedir = os.path.join(cwd, f'results/hpo_impact_imagenet/raw/{args.model}') + save_fname_2x2 = os.path.join(savedir, f'{explainer_key}_2x2.pkl') + os.makedirs(savedir, exist_ok=True) + save_pickle_data( + data={'all_images_heatmaps': q_heatmaps, 'all_images_eval_scores': q_scores, 'predicted_labels': q_labels}, + filepath=save_fname_2x2, + ) + print(f"Saved 2x2 qualitative results for {explainer_key} to {save_fname_2x2}") + + +def _find_param_indices(params_to_find, row_hp_keys, row_combinations, column_hp_key, column_values): + """ + Helper function to find the grid (row, col) indices for a set of hyperparameters. 
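+    For numeric hyperparameters the closest grid value is chosen (optimized
+    values from HPO rarely fall exactly on the grid); non-numeric values must
+    match exactly. For example, epsilon=1e-3 on a row grid [1e-6, 1e-3, 1.0]
+    resolves to row index 1. Returns None if no match can be resolved.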
+ """ + print(f'\nparams_to_find:\n{params_to_find}') + if not params_to_find: + return None + + # Find column index by finding the closest value + opt_col_val = params_to_find.get(column_hp_key) + c_idx = -1 + if opt_col_val is not None: + try: + # Always find the closest value for numeric types + if isinstance(opt_col_val, (int, float)): + c_idx = np.argmin(np.abs(np.array(column_values) - opt_col_val)) + else: + c_idx = column_values.index(opt_col_val) + except (ValueError, IndexError): + print(f"Warning: Could not find column value {opt_col_val} for {column_hp_key}.") + return None + + # Find row index by finding the closest value + r_idx = -1 + try: + target_row_values = [params_to_find.get(key) for key in row_hp_keys] + + # Check if all row values are numeric to calculate distance + is_numeric_search = all(isinstance(v, (int, float)) for v in target_row_values) + + if is_numeric_search: + target_np = np.array(target_row_values) + rows_np = np.array(row_combinations) + # Calculate Euclidean distance and find the index of the minimum distance + distances = np.linalg.norm(rows_np - target_np, axis=1) + r_idx = np.argmin(distances) + else: + # Fallback to exact match for non-numeric or mixed types (e.g., strings) + r_idx = row_combinations.index(tuple(target_row_values)) + + except (ValueError, IndexError): + print(f"Warning: Could not find row combination for {target_row_values}.") + return None + + if r_idx >= 0 and c_idx >= 0: + return (r_idx, c_idx) + return None + + +def plot_hpo_analysis(args): + """ + Analyzes HPO results at the dataset level, focusing on a composite score. + Generates a composite plot with a central heatmap (for the composite score) + and marginal line plots showing AbPC, Simplicity, and Composite scores. + """ + print("\n--- Starting Dataset-Level HPO Analysis ---") + cwd = os.getcwd() + + # Define explainer-specific default parameters to locate them on the grid + DEFAULT_PARAMS_CONFIG = { + 'smooth_grad': { + 'n_iter': 20, + 'noise_level': 0.1, + }, + 'lrp_epsilon_gamma_box': { + 'epsilon': 1e-6, + 'gamma': 0.25, + }, + 'integrated_gradients': { + 'n_steps': 20, + 'baseline_fn': 'zeros', + }, + 'guided_grad_cam': { + 'interpolate_mode': 'nearest', + 'pooling_method': 'sumpos', + 'normalization_method': 'identity', + }, + } + FIGURE_SIZE_CONFIG = { + 'default': { + 'cell_size': 1.0, 'width_padding': 1.0, 'height_padding': 0.0, 'additional_fontsize': 0 + }, + 'smooth_grad': { + 'cell_size': 1.0, 'width_padding': 1.45, 'height_padding': 0.0, 'additional_fontsize': 2.5 + }, + 'lrp_epsilon_gamma_box': { + 'cell_size': 1.0, 'width_padding': 1.4, 'height_padding': 0.0, 'additional_fontsize': 1 + }, + 'guided_grad_cam': { + 'cell_size': 1.2, 'width_padding': 0.0, 'height_padding': 0.2, 'additional_fontsize': 1.2 + }, + 'integrated_gradients': { + 'cell_size': 1.2, 'width_padding': 0.0, 'height_padding': 0.5, 'additional_fontsize': 2 + }, + } + MARGINAL_PLOT_CONFIG = { + 'default': { + 'row': {'nbins': 3, 'format': '.3f'}, 'row_twin': {'nbins': 3, 'format': '.2f'}, + 'col': {'nbins': 3, 'format': '.2f'}, 'col_twin': {'nbins': 3, 'format': '.3f'} + }, + 'smooth_grad': { + 'row': {'nbins': 2, 'format': '.2f'}, 'row_twin': {'nbins': 2, 'format': '.2f'}, + 'col': {'nbins': 3, 'format': '.2f'}, 'col_twin': {'nbins': 2, 'format': '.3f'} + }, + 'lrp_epsilon_gamma_box': { + 'row': {'nbins': 2, 'format': '.3f'}, 'row_twin': {'nbins': 3, 'format': '.2f'}, + 'col': {'nbins': 3, 'format': '.2f'}, 'col_twin': {'nbins': 3, 'format': '.2f'} + }, + 'guided_grad_cam': { + 'row': 
{'nbins': 2, 'format': '.3f'}, 'row_twin': {'nbins': 2, 'format': '.2f'}, + 'col': {'nbins': 3, 'format': '.2f'}, 'col_twin': {'nbins': 2, 'format': '.3f'} + }, + 'integrated_gradients': { + 'row': {'nbins': 2, 'format': '.3f'}, 'row_twin': {'nbins': 3, 'format': '.2f'}, + 'col': {'nbins': 3, 'format': '.2f'}, 'col_twin': {'nbins': 2, 'format': '.3f'} + }, + } + + # Add 'composite' to the list of metrics to process + METRICS_TO_PROCESS = TARGET_METRIC_KEYS + ['composite', 'simplicity'] + + # Pre-calculate figure widths to determine proportional font sizes + print(" Pre-calculating figure widths for font scaling...") + figure_widths = {} + anchor_explainer = 'lrp_epsilon_gamma_box' + explainers_for_width_calc = ['lrp_epsilon_gamma_box', 'smooth_grad', 'integrated_gradients', 'guided_grad_cam'] + + for key in explainers_for_width_calc: + if key in PLOTTING_CONFIGS: + plot_config = PLOTTING_CONFIGS[key] + column_hp_key = plot_config['column_hp'] + all_hps = {**EXPL_HYPERPARAMETERS.get(key, {}), **COMMON_HYPERPARAMETERS} + num_cols = len(all_hps.get(column_hp_key, [])) + + fig_settings = FIGURE_SIZE_CONFIG.get(key, FIGURE_SIZE_CONFIG['default']) + cell_size = fig_settings['cell_size'] + width_padding_size = fig_settings['width_padding'] + + colorbar_width = 0.2 + marg_row_width = 2.0 + + # Calculate and store the total figure width + w = (num_cols * cell_size) + colorbar_width + marg_row_width + width_padding_size + figure_widths[key] = w + + # Define the anchor font size and get the anchor width + anchor_fontsize = 22 + anchor_width = figure_widths.get(anchor_explainer, 1.0) + print(f" Anchor width ({anchor_explainer}) set to: {anchor_width:.2f} with font size {anchor_fontsize}") + + for explainer_key in args.eval_explainer: + print(f"\n--- Analyzing Explainer: {explainer_key} ---") + + # Load pre-computed analysis results and optimized hyperparameters + load_path = os.path.join(cwd, f'results/hpo_impact_imagenet/raw/{args.model}/{explainer_key}.pkl') + best_params_path = os.path.join(cwd, f'data/ImageNet/optimized_params/{args.model}/{explainer_key}.pkl') + + if not os.path.exists(load_path) or not os.path.exists(best_params_path): + print(f" ERROR: Missing results file or params file. Please run --analyze first.") + print(f" - Checked for results: {load_path}") + print(f" - Checked for params: {best_params_path}") + continue + + results = load_pickle_data(load_path) + best_params = load_pickle_data(best_params_path)['composite'] + all_images_eval_scores = results['all_images_eval_scores'] + + if not all_images_eval_scores: + print(f" WARNING: No evaluation scores found in {load_path}. 
Skipping.") + continue + + # Get HP configuration + plot_config = PLOTTING_CONFIGS.get(explainer_key, DEFAULT_PLOT_CONFIG) + column_hp_key = plot_config['column_hp'] + row_hp_keys = plot_config['row_hps'] + all_hps_for_explainer = {**EXPL_HYPERPARAMETERS.get(explainer_key, {}), **COMMON_HYPERPARAMETERS} + column_values = all_hps_for_explainer.get(column_hp_key, []) + row_param_values_lists = [all_hps_for_explainer.get(k, []) for k in row_hp_keys] + row_combinations = list(itertools.product(*row_param_values_lists)) + num_images, num_rows, num_cols = len(all_images_eval_scores), len(row_combinations), len(column_values) + + # Aggregate scores and calculate composite score + aggregated_scores = {m: np.full((num_images, num_rows, num_cols), np.nan) for m in METRICS_TO_PROCESS} + for img_idx, data_id in enumerate(all_images_eval_scores.keys()): + for r_idx in range(num_rows): + for c_idx in range(num_cols): + for m_key in TARGET_METRIC_KEYS: # Only load AbPC and Complexity + score = all_images_eval_scores[data_id].get((r_idx, c_idx, m_key)) + if score is not None and not torch.isnan(score): + aggregated_scores[m_key][img_idx, r_idx, c_idx] = score.item() + + # Calculate composite score from aggregated base metrics + aggregated_scores['composite'] = 0.7 * aggregated_scores['ab_pc'] - 0.3 * aggregated_scores['complexity'] + aggregated_scores['simplicity'] = -1 * aggregated_scores['complexity'] + + # Calculate mean and std for all metrics + mean_scores = {m: np.nanmean(aggregated_scores[m], axis=0) for m in METRICS_TO_PROCESS} + std_scores = {m: np.nanstd(aggregated_scores[m], axis=0) for m in METRICS_TO_PROCESS} + + # Find indices of default and optimized parameters + if explainer_key in DEFAULT_PARAMS_CONFIG: + print(f" Found specific default parameters for {explainer_key}.") + default_params_to_find = DEFAULT_PARAMS_CONFIG[explainer_key] + default_indices = _find_param_indices(default_params_to_find, row_hp_keys, row_combinations, column_hp_key, column_values) + else: + print(" Using (0, 0) as the default parameter index.") + default_indices = (0, 0) + + optimized_indices = _find_param_indices(best_params, row_hp_keys, row_combinations, column_hp_key, column_values) + + print(f" Default HP indices: {default_indices}") + print(f" Optimized HP indices: {optimized_indices}") + + # Generate composite plot + LABEL_MAP = { + "pooling_method": "Pooling", + "interpolate_mode": "Interpolation", + "normalization_method": "Normalization", + "n_steps": "Integration Steps", + "baseline_fn": "Baseline Function", + "noise_level": "Noise Level", + "n_iter": "Number of Samples", + } + POOLING_METHOD_VALUE_MAP = { + 'sumpos': 'sum,pos', 'sumabs': 'sum,abs', 'l1norm': 'l1-norm', + 'maxnorm': 'max-norm', 'l2norm': 'l2-norm', 'l2normsq': 'l2-norm-sq', + 'possum': 'pos,sum', 'posmaxnorm': 'pos,max-norm', + 'posl2norm': 'pos,l2-norm', 'posl2normsq': 'pos,l2-norm-sq' + } + LATEX_SYMBOL_MAP = { + 'gamma': r'$\gamma$', + 'epsilon': r'$\epsilon$' + } + + row_labels = ["\n".join([f"{LABEL_MAP.get(k, k)}={v:.2g}" if isinstance(v, float) else f"{LABEL_MAP.get(k, k)}={v}" for k, v in zip(row_hp_keys, combo)]) for combo in row_combinations] + col_labels = [f"{v:.2g}" if isinstance(v, float) else str(v) for v in column_values] + + # --- Export Data to CSV --- + # Define the header labels for CSVs + row_labels_for_csv = [] + for combo in row_combinations: + label_parts = [f"{key}={val:.2g}" if isinstance(val, float) else f"{key}={val}" for key, val in zip(row_hp_keys, combo)] + row_labels_for_csv.append(" | 
".join(label_parts)) + + col_labels_for_csv = [] + for val in column_values: + val_str = f"{val:.2g}" if isinstance(val, float) else str(val) + col_labels_for_csv.append(f"{column_hp_key}={val_str}") + + print(f" Exporting aggregated scores to CSV for {explainer_key}...") + savedir_csv = os.path.join(cwd, f'results/hpo_impact_imagenet/raw/{args.model}') + os.makedirs(savedir_csv, exist_ok=True) + + # Export heatmap data (Composite Score: Mean ± Std) + heatmap_df_data = [] + for r in range(num_rows): + row_data = [] + for c in range(num_cols): + mean_val = mean_scores['composite'][r, c] + std_val = std_scores['composite'][r, c] + if not np.isnan(mean_val): + row_data.append(f"{mean_val:.4f} ± {std_val:.4f}") + else: + row_data.append("N/A") + heatmap_df_data.append(row_data) + + heatmap_df = pd.DataFrame(heatmap_df_data, index=row_labels_for_csv, columns=col_labels_for_csv) + heatmap_csv_path = os.path.join(savedir_csv, f'{explainer_key}_heatmap_composite.csv') + heatmap_df.to_csv(heatmap_csv_path) + print(f" - Saved heatmap data to: {heatmap_csv_path}") + + # Export row marginal plot data + row_marginal_data = {"Hyperparameter(s)": row_labels_for_csv} + for metric in ['ab_pc', 'simplicity', 'composite']: + row_marginal_data[f'{metric}_mean'] = np.nanmean(mean_scores[metric], axis=1) + row_marginal_data[f'{metric}_std_err'] = np.sqrt(np.nansum(std_scores[metric]**2, axis=1)) / num_cols + + row_df = pd.DataFrame(row_marginal_data) + row_csv_path = os.path.join(savedir_csv, f'{explainer_key}_marginal_rows.csv') + row_df.to_csv(row_csv_path, index=False) + print(f" - Saved row marginal data to: {row_csv_path}") + + # Export column marginal plot data + col_marginal_data = {"Hyperparameter": col_labels_for_csv} + for metric in ['ab_pc', 'simplicity', 'composite']: + col_marginal_data[f'{metric}_mean'] = np.nanmean(mean_scores[metric], axis=0) + col_marginal_data[f'{metric}_std_err'] = np.sqrt(np.nansum(std_scores[metric]**2, axis=0)) / num_rows + + col_df = pd.DataFrame(col_marginal_data) + col_csv_path = os.path.join(savedir_csv, f'{explainer_key}_marginal_columns.csv') + col_df.to_csv(col_csv_path, index=False) + print(f" - Saved column marginal data to: {col_csv_path}") + + # --- Plotting Setup --- + plot_settings = MARGINAL_PLOT_CONFIG.get(explainer_key, MARGINAL_PLOT_CONFIG['default']) + + # Get explainer-specific figure size settings + fig_settings = FIGURE_SIZE_CONFIG.get(explainer_key, FIGURE_SIZE_CONFIG['default']) + cell_size = fig_settings['cell_size'] + width_padding_size = fig_settings['width_padding'] + height_padding_size = fig_settings['height_padding'] + + # Define fixed physical sizes for plot components + marg_row_width = 2.0 + marg_col_height = 2.0 + colorbar_width = 0.2 + + fig_width = (num_cols * cell_size) + colorbar_width + marg_row_width + width_padding_size + fig_height = (num_rows * cell_size) + marg_col_height + height_padding_size + + fig = plt.figure(figsize=(fig_width, fig_height), constrained_layout=True) + + # Create a granual Gridspec for precise control + gs = gridspec.GridSpec( + nrows=2, ncols=3, + figure=fig, + width_ratios=[num_cols * cell_size, colorbar_width, marg_row_width], + height_ratios=[num_rows * cell_size, marg_col_height], + wspace=0.00, hspace=0.00, + ) + + ax_colorbar = fig.add_subplot(gs[0, 1]) + ax_heatmap = fig.add_subplot(gs[0, 0]) + ax_marg_row = fig.add_subplot(gs[0, 2], sharey=ax_heatmap) + ax_marg_col = fig.add_subplot(gs[1, 0], sharex=ax_heatmap) + ax_corner = fig.add_subplot(gs[1, 2]) + + # Define fontsize + additional_fontsize = 
fig_settings['additional_fontsize'] + fontsize_heatmap_score = 12 + additional_fontsize + 2 + fontsize_ticklabels = 12 + additional_fontsize + 2 + fontsize_marginal_ticks = 12 + additional_fontsize + 3 + fontsize_labels = 14 + additional_fontsize + 3 + fontsize_legend = 16 + additional_fontsize + 0 + + # Define linewidth + linewidth_boxes = 5 + + # Define markersize + markersize = 6 + + # Hide tick labels on marginal plots initially + plt.setp(ax_marg_col.get_xticklabels(), visible=False) + + # --- Plot Heatmap --- + im = ax_heatmap.imshow(mean_scores['composite'], cmap='BuPu', aspect='equal') + + # Add grid lines to separate cells + for i in range(num_rows + 1): + ax_heatmap.axhline(i - 0.5, color='black', linewidth=0.5) + for i in range(num_cols + 1): + ax_heatmap.axvline(i - 0.5, color='black', linewidth=0.5) + + ax_heatmap.set_xticks(np.arange(num_cols)) + ax_heatmap.set_yticks(np.arange(num_rows)) + + # Column labels (top) + short_col_hp_key = LATEX_SYMBOL_MAP.get(column_hp_key, LABEL_MAP.get(column_hp_key, column_hp_key)) + ax_heatmap.set_xlabel(short_col_hp_key, fontsize=fontsize_labels, fontweight='bold') + ax_heatmap.xaxis.set_ticks_position('top') + ax_heatmap.xaxis.set_label_position('top') + + col_tick_labels = [] + for val in column_values: + if column_hp_key == 'pooling_method': + col_tick_labels.append(POOLING_METHOD_VALUE_MAP.get(val, val)) + elif isinstance(val, float): + col_tick_labels.append(f"{val:.2g}") + else: + col_tick_labels.append(str(val)) + + if explainer_key != 'guided_grad_cam': + ax_heatmap.set_xticklabels(col_tick_labels, rotation=0, fontsize=fontsize_ticklabels) + else: + ax_heatmap.set_xticklabels(col_tick_labels, rotation=10, fontsize=fontsize_ticklabels) + + # Row labels (left) + short_row_hp_keys = [LATEX_SYMBOL_MAP.get(key, LABEL_MAP.get(key, key)) for key in row_hp_keys] + ax_heatmap.set_ylabel(" & ".join(short_row_hp_keys), fontsize=fontsize_labels, fontweight='bold') + + row_tick_labels = [] + for combo in row_combinations: + current_params_dict = dict(zip(row_hp_keys, combo)) + label_parts = [] + if not row_hp_keys: + label_parts.append("Default") + else: + for r_key in row_hp_keys: + val = current_params_dict[r_key] + if r_key == 'pooling_method': + label_parts.append(POOLING_METHOD_VALUE_MAP.get(val, val)) + elif isinstance(val, float): + label_parts.append(f"{val:.2g}") + else: + label_parts.append(str(val)) + row_tick_labels.append("\n& ".join(label_parts)) + + ax_heatmap.set_yticklabels(row_tick_labels, rotation=0, ha='right', va='center', fontsize=fontsize_ticklabels) + + # Calculate a threshold to switch text color from black to white for readability + all_scores = mean_scores['composite'] + min_score, max_score = np.nanmin(all_scores), np.nanmax(all_scores) + + # Set threshold at 50% of the score range + color_threshold = min_score + (max_score - min_score) * 0.5 + + # Add text annotations with 2 decimal places + for r in range(num_rows): + for c in range(num_cols): + mean_val, std_val = mean_scores['composite'][r, c], std_scores['composite'][r, c] + if not np.isnan(mean_val): + text_color = 'w' if mean_val > color_threshold else 'k' + ax_heatmap.text(c, r, f"{mean_val:.2f}\n(±{std_val:.2f})", ha="center", va="center", color=text_color, fontsize=fontsize_heatmap_score) + + # Add highlight boxes + highlight_cmap = plt.cm.get_cmap('Set2') + highlight_patches = [] + if default_indices: + def_r, def_c = default_indices + + patch_default = plt.Rectangle((default_indices[1]-0.5, default_indices[0]-0.5), 1, 1, + fill=False, 
edgecolor=highlight_cmap(1), lw=linewidth_boxes, label='Default') + ax_heatmap.add_patch(patch_default) + highlight_patches.append(patch_default) + + # lrp_epsilon_gamma_box + # Get the exact values from the config dictionary + default_params = DEFAULT_PARAMS_CONFIG[explainer_key] + exact_gamma = default_params.get('gamma') + exact_epsilon = default_params.get('epsilon') + + if exact_gamma is not None and exact_epsilon is not None: + annotation_text = f"Default\n(γ={exact_gamma:.2f}, ε={exact_epsilon:.2f})" + + # Place text in the bottom-right of the cell + ax_heatmap.text(def_c + 0.45, def_r + 0.45, annotation_text, + color="white", fontsize=9, + ha='right', va='bottom', + bbox=dict(boxstyle='round,pad=0.2', fc='black', ec='none', alpha=0.6)) + + if optimized_indices: + patch_optimized = plt.Rectangle((optimized_indices[1]-0.5, optimized_indices[0]-0.5), 1, 1, + fill=False, edgecolor=highlight_cmap(0), lw=linewidth_boxes, linestyle='-', label='Optimized') + ax_heatmap.add_patch(patch_optimized) + highlight_patches.append(patch_optimized) + + # lrp_epsilon_gamma_box + # Add text annotation for the precise optimized values + opt_r, opt_c = optimized_indices + + # Get the exact floating point values from the 'best_params' dictionary + exact_gamma = best_params.get('gamma') + exact_epsilon = best_params.get('epsilon') + + if exact_gamma is not None and exact_epsilon is not None: + annotation_text = f"Optimized\n(γ={exact_gamma:.2f}, ε={exact_epsilon:.2f})" + + # Place text near the patch + ax_heatmap.text(opt_c + 0.1, opt_r + 0.1, annotation_text, + color="white", fontsize=10, fontweight='bold', + ha='left', va='top', + bbox=dict(boxstyle='round,pad=0.2', fc='black', ec='none', alpha=0.6)) + + # Add colorbar with height matching heatmap + fig.colorbar(im, cax=ax_colorbar).set_label("Mean Composite Score", rotation=-90, va="bottom", fontsize=fontsize_labels, fontweight='bold') + + # --- Draw Column Marginalized Scores (Bottom Row) --- + ax_marg_col_twin = ax_marg_col.twinx() + colors = plt.cm.get_cmap('tab10') + + x_positions = np.arange(len(column_values)) + abpc_col_scores = np.nanmean(mean_scores['ab_pc'], axis=0) + comp_col_scores = np.nanmean(mean_scores['composite'], axis=0) + simplicity_col_scores = np.nanmean(mean_scores['simplicity'], axis=0) + + # Plot AbPC and Composite on the primary Y-axis + line_comp_col = ax_marg_col.plot(x_positions, comp_col_scores, marker='s', linestyle='-', color=colors(2), label='Composite', markersize=markersize)[0] + line_abpc_col = ax_marg_col.plot(x_positions, abpc_col_scores, marker='o', linestyle='--', color=colors(0), label='AbPC', markersize=markersize)[0] + ax_marg_col.set_ylabel('AbPC / Composite', color='black', fontsize=fontsize_labels, fontweight='bold') + + # Plot Simplicity on the twin Y-axis + line_simplicity_col = ax_marg_col_twin.plot(x_positions, simplicity_col_scores, marker='^', linestyle='--', color=colors(1), label='Simplicity', markersize=markersize)[0] + ax_marg_col_twin.set_ylabel('Simplicity', color='black', fontsize=fontsize_labels, fontweight='bold') + + # Capture the label artist objects to ensure they are not clipped + label_artist_col_1 = ax_marg_col.yaxis.get_label() + label_artist_col_2 = ax_marg_col_twin.yaxis.get_label() + + ax_marg_col.tick_params(axis='y', labelsize=fontsize_marginal_ticks) + ax_marg_col_twin.tick_params(axis='y', labelcolor='black', labelsize=fontsize_marginal_ticks) + ax_marg_col.yaxis.set_major_locator(mticker.MaxNLocator(nbins=plot_settings['col']['nbins'], prune='both')) + 
ax_marg_col_twin.yaxis.set_major_locator(mticker.MaxNLocator(nbins=plot_settings['col_twin']['nbins'], prune='both')) + + # Format tick labels + ax_marg_col.yaxis.set_major_formatter(plt.FuncFormatter(lambda x, p: f"{x:{plot_settings['col']['format']}}")) + ax_marg_col_twin.yaxis.set_major_formatter(plt.FuncFormatter(lambda x, p: f"{x:{plot_settings['col_twin']['format']}}")) + + ax_marg_col.set_xticks(x_positions) + ax_marg_col.grid(True, linestyle='--', alpha=0.6) + ax_marg_col.set_xlim(-0.5, len(column_values) - 0.5) + + # --- Draw Row Marginalized Scores (Right Column) --- + ax_marg_row_twin = ax_marg_row.twiny() + y_positions = np.arange(len(row_combinations)) + + abpc_row_scores = np.round(np.nanmean(mean_scores['ab_pc'], axis=1), decimals=4) + comp_row_scores = np.round(np.nanmean(mean_scores['composite'], axis=1), decimals=4) + simplicity_row_scores = np.round(np.nanmean(mean_scores['simplicity'], axis=1), decimals=4) + + # Plot Simplicity on the primary X-axis + line_simplicity_row = ax_marg_row.plot(simplicity_row_scores, y_positions, marker='^', linestyle='--', color=colors(1), label='Simplicity', markersize=markersize)[0] + ax_marg_row.set_xlabel('Simplicity', color='black', fontsize=fontsize_labels, fontweight='bold') + + # Plot AbPC and Composite on the twin X-axis + line_comp_row = ax_marg_row_twin.plot(comp_row_scores, y_positions, marker='s', linestyle='-', color=colors(2), label='Composite', markersize=markersize)[0] + line_abpc_row = ax_marg_row_twin.plot(abpc_row_scores, y_positions, marker='o', linestyle='--', color=colors(0), label='AbPC', markersize=markersize)[0] + ax_marg_row_twin.set_xlabel('AbPC / Composite', color='black', fontsize=fontsize_labels, fontweight='bold') + + # Capture the label artist objects to ensure they are not clipped + label_artist_row_1 = ax_marg_row.xaxis.get_label() + label_artist_row_2 = ax_marg_row_twin.xaxis.get_label() + + ax_marg_row.tick_params(axis='x', labelsize=fontsize_marginal_ticks) + ax_marg_row_twin.tick_params(axis='x', labelcolor='black', labelsize=fontsize_marginal_ticks) + ax_marg_row.xaxis.set_major_locator(mticker.MaxNLocator(nbins=plot_settings['row']['nbins'], prune='both')) + ax_marg_row_twin.xaxis.set_major_locator(mticker.MaxNLocator(nbins=plot_settings['row_twin']['nbins'], prune='both')) + + ax_marg_row_twin.xaxis.set_label_position("top") + ax_marg_row_twin.xaxis.tick_top() + + # Format tick labels + ax_marg_row.xaxis.set_major_formatter(plt.FuncFormatter(lambda x, p: f"{x:{plot_settings['row']['format']}}")) + ax_marg_row_twin.xaxis.set_major_formatter(plt.FuncFormatter(lambda x, p: f"{x:{plot_settings['row_twin']['format']}}")) + + ax_marg_row.grid(True, linestyle='--', alpha=0.6) + ax_marg_row.tick_params(axis='y', which='both', labelleft=False, labelright=False) + + # --- Corner Cell for Combined Legend --- + ax_corner.axis('off') + + # Collect all legend elements + metric_lines = [line_comp_col, line_abpc_col, line_simplicity_col] + metric_labels = [r'Composite ($\uparrow$)', r'AbPC ($\uparrow$)', r'Simplicity ($\uparrow$)'] + + all_lines = metric_lines + highlight_patches + all_labels = metric_labels + [patch.get_label() for patch in highlight_patches] + + # Create comprehensive legend + legend = ax_corner.legend( + all_lines, + all_labels, + loc='center', + fontsize=fontsize_legend, + frameon=True, + fancybox=True, + shadow=True, + borderpad=1.0, + columnspacing=1.0, + handlelength=1.0, + handletextpad=0.5 + ) + + # Calculate the dynamic font size proportional to the figure width + current_width = 
figure_widths.get(explainer_key, anchor_width) + dynamic_suptitle_fontsize = anchor_fontsize * (current_width / anchor_width) + + # Create a list of all extra artists that need to be included in the final saved area + extra_artists_to_include = (legend, label_artist_col_1, label_artist_col_2, label_artist_row_1, label_artist_row_2,) + + # --- Save Figure --- + savedir = os.path.join(cwd, f'results/hpo_impact_imagenet/figures/{args.model}') + os.makedirs(savedir, exist_ok=True) + save_fname = os.path.join(savedir, f'{explainer_key}_composite_heatmap.pdf') + plt.savefig( + save_fname, + dpi=300, + bbox_inches='tight', + bbox_extra_artists=extra_artists_to_include + ) + plt.close(fig) + print(f" Saved dataset-level composite plot to: {save_fname}") + + +def plot_attribution_comparison(args): + """ + Generates a figure showing qualitative examples of default vs. optimized attribution heatmaps. + """ + cwd = os.getcwd() + + # --- Load Dataset and Transform --- + _, transform = get_torchvision_model(args.model) + dataset = get_imagenet_val_dataset(transform, args.data_dir) + + # Define explainer-specific default parameters to locate them on the grid + DEFAULT_PARAMS_CONFIG = { + 'smooth_grad': { + 'n_iter': 20, + 'noise_level': 0.1, + }, + 'lrp_epsilon_gamma_box': { + 'epsilon': 1e-6, + 'gamma': 0.25, + }, + 'integrated_gradients': { + 'n_steps': 20, + 'baseline_fn': 'zeros', + }, + 'guided_grad_cam': { + 'interpolate_mode': 'nearest', + 'pooling_method': 'sumpos', + 'normalization_method': 'identity', + }, + } + FIGURE_SIZE_CONFIG = { + 'default': { + 'cell_size': 1.0, 'width_padding': 1.0, 'height_padding': 0.0, 'additional_fontsize': 0 + }, + 'smooth_grad': { + 'cell_size': 1.0, 'width_padding': 1.0, 'height_padding': 0.0, 'additional_fontsize': 0 + }, + 'lrp_epsilon_gamma_box': { + 'cell_size': 1.0, 'width_padding': 1.0, 'height_padding': 0.0, 'additional_fontsize': 0 + }, + 'guided_grad_cam': { + 'cell_size': 1.2, 'width_padding': 0.0, 'height_padding': 0.05, 'additional_fontsize': 2 + }, + 'integrated_gradients': { + 'cell_size': 1.2, 'width_padding': 0.0, 'height_padding': 0.35, 'additional_fontsize': 2 + }, + } + # Configuration for plot layouts and instance indices + QUALITATIVE_CONFIG = { + 'smooth_grad': { + 'instances': [66, 122], 'grid_shape': (1, 6) + }, + 'lrp_epsilon_gamma_box': { + 'instances': [81, 93], 'grid_shape': (1, 6) + }, + 'integrated_gradients': { + 'instances': [6, 16], 'grid_shape': (1, 6) + }, + 'guided_grad_cam': { + 'instances': [24, 68], 'grid_shape': (1, 6) + }, + } + # Configuration for the position of score annotations per explainer per instance + ANNOTATION_POS_CONFIG = { + 'default': ['bottom-left', 'bottom-left'], + 'smooth_grad': ['bottom-left', 'top-left'], + 'lrp_epsilon_gamma_box': ['bottom-left', 'bottom-left'], + 'guided_grad_cam': ['bottom-left', 'bottom-left'], + 'integrated_gradients': ['top-left', 'top-left'], + } + + for explainer_key in args.eval_explainer: + print(f"\n--- Visualizing Heatmap Examples: {explainer_key} ---") + + # Load pre-computed analysis results and optimized hyperparameters + hps_source = EXPL_HYPERPARAMETERS + load_path = os.path.join(cwd, f'results/hpo_impact_imagenet/raw/{args.model}/{explainer_key}.pkl') + best_params_path = os.path.join(cwd, f'data/ImageNet/optimized_params/{args.model}/{explainer_key}.pkl') + + # For lrp_epsilon_gamma_box, use the special 2x2 grid data and config + if explainer_key == 'lrp_epsilon_gamma_box': + print(" -> Using special 2x2 grid configuration for LRPEpsilonGammaBox qualitative 
examples.") + hps_source = QUALITATIVE_HYPERPARAMETERS + load_path = os.path.join(cwd, f'results/hpo_impact_imagenet/raw/{args.model}/{explainer_key}_2x2.pkl') + + if not os.path.exists(load_path) or not os.path.exists(best_params_path): + print(f" ERROR: Missing results file or params file. Please run --analyze first.") + print(f" - Checked for results: {load_path}") + print(f" - Checked for params: {best_params_path}") + continue + + results = load_pickle_data(load_path) + best_params = load_pickle_data(best_params_path)['composite'] + predicted_labels = results.get('predicted_labels', {}) + all_images_eval_scores = results.get('all_images_eval_scores', {}) + + # Get HP configuration + plot_config = PLOTTING_CONFIGS.get(explainer_key, DEFAULT_PLOT_CONFIG) + column_hp_key = plot_config['column_hp'] + row_hp_keys = plot_config['row_hps'] + all_hps_for_explainer = {**hps_source.get(explainer_key, {}), **COMMON_HYPERPARAMETERS} + column_values = all_hps_for_explainer.get(column_hp_key, []) + row_param_values_lists = [all_hps_for_explainer.get(k, []) for k in row_hp_keys] + row_combinations = list(itertools.product(*row_param_values_lists)) + + if explainer_key in DEFAULT_PARAMS_CONFIG: + print(f" Found specific default parameters for {explainer_key}.") + default_params_to_find = DEFAULT_PARAMS_CONFIG[explainer_key] + default_indices = _find_param_indices(default_params_to_find, row_hp_keys, row_combinations, column_hp_key, column_values) + else: + print(" Using (0, 0) as the default parameter index.") + default_indices = (0, 0) + + optimized_indices = _find_param_indices(best_params, row_hp_keys, row_combinations, column_hp_key, column_values) + + # Check if required data is available + if not default_indices or not optimized_indices: + print(" - Missing default or optimized indices. Skipping qualitative plot.") + return + + print(f" Default HP indices: {default_indices}") + print(f" Optimized HP indices: {optimized_indices}") + + print(f" Generating qualitative example plot for {explainer_key}...") + + # Check if the explainer has a defined qualitative plot config + if explainer_key not in QUALITATIVE_CONFIG: + print(f" - No qualitative config for {explainer_key}. Skipping plot.") + return + + config = QUALITATIVE_CONFIG[explainer_key] + instance_indices = config['instances'] + num_rows, num_cols = config['grid_shape'] + + # Verify that all required instances exist in the results + all_images_heatmaps = results.get('all_images_heatmaps', {}) + for idx in instance_indices: + if idx not in all_images_heatmaps: + print(f" - Instance index {idx} not found in results. Skipping qualitative plot.") + return + + # --- Plotting Setup --- + # Get explainer-specific figure size settings + fig_settings = FIGURE_SIZE_CONFIG.get(explainer_key, FIGURE_SIZE_CONFIG['default']) + cell_size = fig_settings['cell_size'] + width_padding_size = fig_settings['width_padding'] + height_padding_size = fig_settings['height_padding'] + + # Define fixed physical sizes for plot components + marg_row_width = 2.0 + marg_col_height = 2.0 + colorbar_width = 0.2 + + fig_height = (7 * cell_size) + marg_col_height + height_padding_size + fig_width = fig_height / num_rows * num_cols + + fig = plt.figure(figsize=(num_cols * 2.5, num_rows * 6)) + + # Create an outer grid (1x2) to separate the two instances. + outer_gs = gridspec.GridSpec(1, 2, figure=fig, wspace=0.15) + + # Create two inner grids (1x3 each), one for each outer cell. 
+ inner_gs_1 = gridspec.GridSpecFromSubplotSpec(1, 3, subplot_spec=outer_gs[0], wspace=0.1) + inner_gs_2 = gridspec.GridSpecFromSubplotSpec(1, 3, subplot_spec=outer_gs[1], wspace=0.1) + + # Create the axes from the inner grids. + axes_instance_1 = [fig.add_subplot(inner_gs_1[0, i]) for i in range(3)] + axes_instance_2 = [fig.add_subplot(inner_gs_2[0, i]) for i in range(3)] + + # Combine into a single list of 6 + axes = axes_instance_1 + axes_instance_2 + + highlight_cmap = plt.cm.get_cmap('Set2') + default_color = highlight_cmap(1) + optimized_color = highlight_cmap(0) + edge_linewidth = 6 + + fontsize_labels = 16.8 + fontsize_text = 15 + fontsize_title = 20 + + # --- Plot Attribution Maps --- + # Get the two instances to plot + instance_info = [ + {'idx': instance_indices[0], 'axes': axes[0:3]}, + {'idx': instance_indices[1], 'axes': axes[3:6]} + ] + + for i, info in enumerate(instance_info): + instance_idx = info['idx'] + ax_original, ax_default, ax_optimized = info['axes'] + + instance_heatmaps = all_images_heatmaps[instance_idx] + instance_scores = all_images_eval_scores.get(instance_idx, {}) + + # Get the position config for the current explainer + positions = ANNOTATION_POS_CONFIG.get(explainer_key, ANNOTATION_POS_CONFIG['default']) + # Get the position for the current instance (i: the loop index - 0 or 1) + position_key = positions[i] + + # Set text position parameters based on the config + if position_key == 'top-left': + y_pos, v_align = 0.97, 'top' + else: # Default to 'bottom-left' + y_pos, v_align = 0.03, 'bottom' + + # --- Plotting --- + # Column 1: Original Image + image_tensor, _ = dataset[instance_idx] + denormalized_img = denormalize_image(image_tensor, transform.mean, transform.std) + ax_original.imshow(denormalized_img) + + # Column 2: Default Heatmap + default_heatmap = instance_heatmaps.get(default_indices, np.zeros((224, 224))) + ax_default.imshow(default_heatmap, cmap='Reds') + + # Column 3: Optimized Heatmap + optimized_heatmap = instance_heatmaps.get(optimized_indices, np.zeros((224, 224))) + ax_optimized.imshow(optimized_heatmap, cmap='Reds') + + # --- Labels and Annotations --- + image_label = predicted_labels.get(instance_idx, "Unknown") + ax_original.set_ylabel(f"Label: {image_label}", fontsize=fontsize_labels, fontweight='bold', rotation=90, labelpad=10) + + # Default scores annotation + abpc_def = instance_scores.get((default_indices[0], default_indices[1], 'ab_pc'), torch.tensor(np.nan)).item() + simp_def = -instance_scores.get((default_indices[0], default_indices[1], 'complexity'), torch.tensor(np.nan)).item() + def_text = f"AbPC: {abpc_def:.2f}\nSimp: {simp_def:.3f}" + ax_default.text(0.03, y_pos, def_text, color="white", fontsize=fontsize_text, ha='left', va=v_align, transform=ax_default.transAxes, + bbox=dict(boxstyle='round,pad=0.2', fc='black', ec='none', alpha=0.6)) + + # Optimized scores annotation + abpc_opt = instance_scores.get((optimized_indices[0], optimized_indices[1], 'ab_pc'), torch.tensor(np.nan)).item() + simp_opt = -instance_scores.get((optimized_indices[0], optimized_indices[1], 'complexity'), torch.tensor(np.nan)).item() + opt_text = f"AbPC: {abpc_opt:.2f}\nSimp: {simp_opt:.3f}" + ax_optimized.text(0.03, y_pos, opt_text, color="white", fontsize=fontsize_text, ha='left', va=v_align, transform=ax_optimized.transAxes, + bbox=dict(boxstyle='round,pad=0.2', fc='black', ec='none', alpha=0.6)) + + # --- Styling --- + # Set titles only for the first instance's columns + ax_original.set_title("Input", fontsize=fontsize_title, fontweight='bold', 
pad=10) + ax_default.set_title("Default", fontsize=fontsize_title, fontweight='bold', pad=10) + ax_optimized.set_title("Optimized", fontsize=fontsize_title, fontweight='bold', pad=10) + + # Apply styles to all axes + ax_original.set_xticks([]); ax_original.set_yticks([]) + for ax, color in [(ax_default, default_color), (ax_optimized, optimized_color)]: + ax.set_xticks([]); ax.set_yticks([]) + for spine in ax.spines.values(): + spine.set_edgecolor(color) + spine.set_linewidth(edge_linewidth) + + # --- Save Figure --- + fig.subplots_adjust(left=0.05, right=0.98, bottom=0.15, top=0.85) + savedir = os.path.join(cwd, f'results/hpo_impact_imagenet/figures/{args.model}') + os.makedirs(savedir, exist_ok=True) + save_fname = os.path.join(savedir, f'{explainer_key}_qualitative_examples.pdf') + plt.savefig(save_fname, dpi=150, bbox_inches='tight') + plt.close(fig) + print(f" Saved qualitative examples to: {save_fname}") + + +def main(): + """Main execution function.""" + # arguments + parser = argparse.ArgumentParser(description="Evaluate and Analyze HPO Impact in PnPXAI") + parser.add_argument('--model', type=str, choices=TORCHVISION_MODEL_CHOICES, required=True) + parser.add_argument('--data_dir', type=str, required=True) + parser.add_argument('--data_from', type=int, default=0) + parser.add_argument('--data_to', type=int, default=128) + parser.add_argument('--batch_size', type=int, default=1) + parser.add_argument('--disable_gpu', action='store_true') + parser.add_argument('--analyze', action='store_true', help="Run HPO impact analysis and save results.") + parser.add_argument('--visualize', action='store_true', help="Visualize HPO impact analysis results and attribution maps.") + parser.add_argument('--eval_explainer', type=str, nargs='+') + + args = parser.parse_args() + + if args.analyze: + run_grid_search_evaluation(args) + print("\nEvaluation finished.") + + if args.visualize: + plot_hpo_analysis(args) + plot_attribution_comparison(args) + print("\nVisualization finished.") + + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/experiments/scripts/analyze_livertumor_hpo.py b/experiments/scripts/analyze_livertumor_hpo.py index 2b39313..4f366d3 100644 --- a/experiments/scripts/analyze_livertumor_hpo.py +++ b/experiments/scripts/analyze_livertumor_hpo.py @@ -22,7 +22,7 @@ from pnpxai import AutoExplanationForImageClassification from experiments.metrics import RelevanceAccuracy -from experiments.utils.helpers import ( +from experiments.utils import ( set_seed, get_livertumor_dataset, get_livertumor_dataset_from_hf, get_livertumor_model, get_livertumor_model_from_hf, diff --git a/experiments/scripts/analyze_wine_quality.py b/experiments/scripts/analyze_wine_quality.py new file mode 100644 index 0000000..2308e25 --- /dev/null +++ b/experiments/scripts/analyze_wine_quality.py @@ -0,0 +1,1199 @@ +#!/usr/bin/env python3 +""" +Wine Quality XAI Experiment Runner - Standalone Version + +This script runs a complete Wine Quality explanation experiment: +1. Loads data and models +2. Generates explanations using multiple frameworks (PnPXAI, Captum, OmniXAI, OpenXAI, AutoXAI) +3. Evaluates explanations +4. Generates a LaTeX table in markdown format +5. 
Saves results to results/wine_quality/ + +Usage: + python analyze_wine_quality.py [--n-samples N] [--seed SEED] [--verbose] + +Author: Generated for Wine Quality XAI benchmarking +""" + +import os +import sys +import json +import time +import pickle +import random +import argparse +import logging +import warnings +import subprocess +import functools +from pathlib import Path +from datetime import datetime +from typing import Dict, Any, Tuple, List, Optional, Union +from collections import defaultdict + +import numpy as np +import pandas as pd +import torch +import torch.nn as nn +import yaml +from tqdm import tqdm + +from experiments.utils import ( + set_seed, + get_winequality_dataset, + winequality_transform, + winequality_invert_transform, + winequality_find_idx, + get_winequality_model, + TorchModelForXGBoost, +) + +# Suppress warnings +warnings.filterwarnings('ignore') +warnings.filterwarnings('ignore', message='You are providing multiple inputs for Lime / Kernel SHAP attributions') + +# ============================================================================ +# Explainer Validation +# ============================================================================ + +FRAMEWORK_MODEL_SUPPORT = { + "pnpxai": ["xgb", "tab_resnet"], + "captum": ["xgb", "tab_resnet"], + "omnixai": ["xgb"], + "openxai": ["tab_resnet"], + "autoxai": ["xgb", "tab_resnet"], +} + +FRAMEWORK_EXPLAINER_SUPPORT = { + "pnpxai": ["lime", "shap", "ig", "grad", "sg", "itg", "vg", "lrp"], + "captum": ["lime", "shap", "ig", "grad", "sg", "itg", "lrp"], + "omnixai": ["lime", "shap"], + "openxai": ["lime", "shap", "ig", "grad", "sg", "itg"], + "autoxai": ["lime", "shap"], +} + + +def validate_explainer_args(framework: str, model: str, explainer: str) -> None: + """Validate explainer arguments.""" + if framework not in FRAMEWORK_MODEL_SUPPORT: + raise ValueError(f"Invalid framework: {framework}") + + if model not in FRAMEWORK_MODEL_SUPPORT[framework]: + raise ValueError(f"Framework {framework} does not support model {model}") + + if explainer not in FRAMEWORK_EXPLAINER_SUPPORT[framework]: + raise ValueError(f"Framework {framework} does not support explainer {explainer}") + + +# ============================================================================ +# Explanation Generation Functions +# ============================================================================ + +def explain_with_pnpxai( + model: nn.Module, + X_test: np.ndarray, + y_test: np.ndarray, + explainer_name: Optional[str], + config: Dict[str, Any], + logger: logging.Logger, + batch_size: int = 32, + n_samples: int = 25, + n_steps: int = 50, + model_type: str = "xgb", +) -> np.ndarray: + """Generate explanations using PnPXAI framework.""" + from torch.utils.data import DataLoader, Dataset + from pnpxai import Experiment, AutoExplanation + from pnpxai.core.modality.modality import Modality + from pnpxai.explainers import Lime, KernelShap + from pnpxai.evaluator.metrics import AbPC, Complexity, Metric + from sklearn.cluster import KMeans as SklearnKMeans + from pnpxai.explainers.utils.baselines import BaselineFunction + from pnpxai.explainers.utils.postprocess import NormalizationFunction, minmax + from pnpxai.explainers.base import Tunable + from pnpxai.explainers.types import TunableParameter + + logger.info("Setting up PnPXAI framework...") + + # Define custom dataset + class TabDataset(Dataset): + def __init__(self, inputs, labels): + super().__init__() + self.inputs = inputs + self.labels = labels + + def __len__(self): + return len(self.inputs) + + def 
__getitem__(self, idx): + return self.inputs[idx], self.labels[idx] + + def collate_fn(batch): + inputs = torch.stack([torch.from_numpy(d[0]) for d in batch]).to(torch.float) + labels = torch.tensor([d[1] for d in batch], dtype=torch.long) + return inputs, labels + + # Create dataset and dataloader + test_dataset = TabDataset(X_test, y_test) + test_loader = DataLoader( + test_dataset, + batch_size=batch_size, + collate_fn=collate_fn, + shuffle=False, + pin_memory=True, + ) + + # Create modality from sample batch + sample_batch = next(iter(test_loader)) + modality = Modality( + dtype=sample_batch[0].dtype, + ndims=sample_batch[0].dim(), + ) + + # Define custom baseline function (KMeans) + bg_data_idx = np.random.choice(len(X_test), size=50, replace=False) + X_bg = X_test[bg_data_idx] + + class KMeans(BaselineFunction, Tunable): + def __init__(self, background_data, n_clusters=8): + self.background_data = background_data + self.n_clusters = TunableParameter( + name='n_clusters', + current_value=n_clusters, + dtype=int, + is_leaf=True, + space={'low': 10, 'high': len(background_data), 'step': 10}, + ) + self.kmeans_ = SklearnKMeans(n_clusters).fit(background_data) + BaselineFunction.__init__(self) + Tunable.__init__(self) + self.register_tunable_params([self.n_clusters]) + + def __call__(self, inputs): + if inputs.ndim == 3: + inputs = inputs.squeeze(1) + cluster_ids = self.kmeans_.predict(inputs.to(torch.float64).numpy()) + cluster_centers = self.kmeans_.cluster_centers_[cluster_ids] + return torch.from_numpy(cluster_centers).float().to(inputs.device) + + # Define custom normalization functions + class Pos(NormalizationFunction): + def __init__(self): + super().__init__() + + def __call__(self, attrs): + return attrs.abs() + + class MinMax(NormalizationFunction): + def __init__(self): + super().__init__() + + def __call__(self, attrs): + return minmax(attrs) / 1000 + + # Define compound metric + class CompoundMetric(Metric): + def __init__( + self, + model, + cmpd_metrics, + weights, + explainer=None, + target_input_keys=None, + additional_input_keys=None, + output_modifier=None, + ): + super().__init__( + model, explainer, target_input_keys, + additional_input_keys, output_modifier, + ) + assert len(cmpd_metrics) == len(weights) + self.cmpd_metrics = cmpd_metrics + self.weights = weights + + def evaluate(self, inputs, targets, attrs): + values = torch.zeros(attrs.size(0)).to(attrs.device) + for weight, metric in zip(self.weights, self.cmpd_metrics): + values += weight * metric.set_explainer(self.explainer).evaluate(inputs, targets, attrs) + return values + + # Create experiment based on model type + logger.info("Creating PnPXAI experiment...") + + if model_type == "tab_resnet": + expr = AutoExplanation( + model=model, + data=test_loader, + modality=modality, + target_input_keys=[0], + target_class_extractor=lambda outputs: outputs.argmax(-1), + label_key='labels', + target_labels=False, + ) + + expr.metrics.delete('morf') + expr.metrics.delete('lerf') + + elif model_type == "xgb": + expr = Experiment( + model=model, + data=test_loader, + modality=modality, + target_input_keys=[0], + target_class_extractor=lambda outputs: outputs.argmax(-1), + label_key=-1, + ) + + # add explainers + expr.explainers.add('kernel_shap', KernelShap) + expr.explainers.add('lime', Lime) + + # add metrics + expr.metrics.add('abpc', AbPC) + + else: + raise ValueError("Invalid model type") + + # Add custom baseline function and default kwargs + expr.modality.util_functions['baseline_fn'].add('kmeans', KMeans) + 
expr.modality.util_functions['baseline_fn'].add_default_kwargs( + 'background_data', X_bg + ) + + # Add custom normalization functions and complexity metric + expr.modality.util_functions['normalization_fn'].add('pos', Pos) + expr.modality.util_functions['normalization_fn'].add('minmax', MinMax) + expr.metrics.add('cmpx', Complexity) + expr.metrics.add('cmpd', CompoundMetric) + + # Map explainer names + PNP_INV_MAP = { + "kernel_shap": "shap", + "lime": "lime", + "gradient": "grad", + "grad_x_input": "itg", + "integrated_gradients": "ig", + "smooth_grad": "sg", + "lrp_uniform_epsilon": "lrp", + "var_grad": "vg", + } + + explainer_map = { + 'shap': 'kernel_shap', + 'lime': 'lime', + 'grad': 'gradient', + 'itg': 'grad_x_input', + 'ig': 'integrated_gradients', + 'sg': 'smooth_grad', + 'vg': 'var_grad', + 'lrp': 'lrp_uniform_epsilon', + } + + if explainer_name: + pnp_explainer = explainer_map.get(explainer_name, explainer_name) + logger.info(f"Using explainer: {pnp_explainer}") + + # Setup metric options + metric_options = { + 'cmpd_metrics': [ + expr.create_metric('abpc'), + expr.create_metric('cmpx'), + ], + 'weights': [.7, -.3] + } + + # Set direction + direction = 'maximize' + + # Setup disable_tunable_params + disable_tunable_params = {} + if pnp_explainer in ['lime', 'kernel_shap']: + disable_tunable_params['n_samples'] = n_samples + if pnp_explainer in ['integrated_gradients']: + disable_tunable_params['n_steps'] = n_steps + + logger.info("Running hyperparameter optimization...") + opt_results = expr.optimize( + explainer_key=pnp_explainer, + metric_key='cmpd', + metric_options=metric_options, + direction=direction, + disable_tunable_params=disable_tunable_params, + **config['optuna'] + ) + + logger.info(f"Best value: {opt_results.study.best_trial.value:.4f}") + + # Re-set seeds before generating explanations for reproducibility + logger.info("Re-setting random seeds for reproducible explanation generation...") + set_seed(config['optuna'].get('seed', 42)) + + # Generate explanations + opt_explainer = opt_results.explainer + th_test_input = torch.tensor(test_dataset.inputs, dtype=torch.float32) + targets = model(th_test_input).argmax(-1) + + exp_name = PNP_INV_MAP[pnp_explainer] + + if exp_name in ["shap", "lime"]: + explanations = opt_explainer.attribute(th_test_input, targets)[0].detach().cpu().numpy() + else: + explanations = opt_explainer.attribute(th_test_input, targets).squeeze(1).detach().cpu().numpy() + + else: + raise ValueError("PnPXAI requires explainer name") + + return explanations + + +def explain_with_captum( + model: nn.Module, + X_test: np.ndarray, + y_test: np.ndarray, + explainer_name: str, + config: Dict[str, Any], + logger: logging.Logger, + n_samples: int = 25, +) -> np.ndarray: + """Generate explanations using Captum framework.""" + from captum.attr import ( + KernelShap, Lime, IntegratedGradients, Saliency, + InputXGradient, NoiseTunnel, LRP + ) + from captum.attr._utils.lrp_rules import EpsilonRule + + logger.info(f"Setting up Captum framework with {explainer_name}...") + + # Create explainer + if explainer_name == "shap": + explainer = KernelShap(model) + elif explainer_name == "lime": + explainer = Lime(model, interpretable_model=None) + elif explainer_name == "grad": + explainer = Saliency(model) + elif explainer_name == "itg": + explainer = InputXGradient(model) + elif explainer_name == "ig": + explainer = IntegratedGradients(model, multiply_by_inputs=True) + elif explainer_name == "sg": + explainer = NoiseTunnel(Saliency(model)) + elif explainer_name == 
"lrp": + # Set LRP rules for batch norm layers + if hasattr(model, 'res_blocks'): + for block in model.res_blocks: + if hasattr(block, 'bn'): + block.bn.rule = EpsilonRule() + if hasattr(model, 'bn'): + model.bn.rule = EpsilonRule() + explainer = LRP(model) + else: + raise ValueError(f"Unknown Captum explainer: {explainer_name}") + + # Convert to tensor + X_test_t = torch.tensor(X_test, dtype=torch.float32) + targets = model(X_test_t).argmax(dim=1) + + # Generate explanations + logger.info("Generating explanations...") + + if explainer_name == "grad": + explanations = explainer.attribute(X_test_t, target=targets, abs=False) + explanations = explanations.detach().numpy() + + elif explainer_name == "sg": + explanations = explainer.attribute(X_test_t, target=targets, nt_type='smoothgrad') + explanations = explanations.detach().numpy() + + elif explainer_name in ("shap", "lime"): + # Process in batches to avoid memory issues + attrs_list = [] + for i in tqdm(range(len(X_test_t)), desc="Explaining"): + input_i = X_test_t[i].unsqueeze(0) + attr_i = explainer.attribute(input_i, target=targets[i], n_samples=n_samples) + attrs_list.append(attr_i.detach().cpu().numpy()) + explanations = np.concatenate(attrs_list, axis=0) + + else: + explanations = explainer.attribute(X_test_t, target=targets) + explanations = explanations.detach().numpy() + + return explanations + + +def explain_with_omnixai( + model: nn.Module, + X_test: np.ndarray, + y_test: np.ndarray, + explainer_name: str, + config: Dict[str, Any], + logger: logging.Logger, + feature_metadata: Dict[str, Any], + raw_data, + n_samples: int = 64, +) -> np.ndarray: + """Generate explanations using OmniXAI framework.""" + from omnixai.data.tabular import Tabular + from omnixai.explainers.tabular import TabularExplainer + + logger.info(f"Setting up OmniXAI framework with {explainer_name}...") + + NAME_MAP = { + "lime": "LimeTabular", + "shap": "ShapTabular" + } + + explainer_nm = NAME_MAP[explainer_name] + + # Prepare training data + raw_data = raw_data.fillna("missing") + categorical_columns = [c for c in raw_data.columns if feature_metadata[c]["type"] == "categorical"] + train_data = Tabular(raw_data, categorical_columns=categorical_columns) + + # Get target function from wrapped model + if hasattr(model, 'xgb_model'): + target_function = model.xgb_model.predict_proba + else: + raise ValueError("OmniXAI requires XGBoost model") + + # Create transformation functions + transform_fn = functools.partial(winequality_transform, feature_metadata=feature_metadata) + + def prep(z): + return transform_fn(z.data.fillna("missing")) + + # Create explainer + explainer = TabularExplainer( + explainers=[explainer_nm], + mode="classification", + data=train_data, + model=target_function, + preprocess=prep, + ) + + # Prepare test instances + test_instances = winequality_invert_transform(X_test, feature_metadata).fillna("missing") + + # Set parameters + params = { + "LimeTabular": {"num_features": raw_data.shape[1], "num_samples": n_samples}, + "ShapTabular": {"nsamples": n_samples} + } + + # Generate explanations + logger.info("Generating explanations...") + exp_obj = explainer.explain(test_instances, params=params) + + # Extract and reorder scores + scores = [] + for i in range(test_instances.shape[0]): + exp = exp_obj[explainer_nm].get_explanations(i) + sorted_idx = winequality_find_idx(exp['features'], exp['instance'].columns.tolist()) + scores.append([exp['scores'][j] for j in sorted_idx]) + + explanations = np.array(scores) + return explanations + + +def 
explain_with_openxai( + model: nn.Module, + X_test: np.ndarray, + y_test: np.ndarray, + X_train: np.ndarray, + explainer_name: str, + config: Dict[str, Any], + logger: logging.Logger, + feature_metadata: Dict[str, Any], + batch_size: int = 32, + n_samples: int = 64, +) -> np.ndarray: + """Generate explanations using OpenXAI framework.""" + from torch.utils.data import DataLoader, TensorDataset + from openxai import Explainer + from openxai.experiment_utils import fill_param_dict + + logger.info(f"Setting up OpenXAI framework with {explainer_name}...") + + # Convert to tensors + test_input = torch.tensor(X_test, dtype=torch.float32) + train_input = None + explainer_params = {} + + # Setup training data for LIME/IG + if explainer_name in ['lime', 'ig']: + train_input = torch.tensor(X_train, dtype=torch.float32) + explainer_params = fill_param_dict(explainer_name, {}, train_input) + + if explainer_name in ['lime', 'shap']: + explainer_params['n_samples'] = n_samples + + # Create explainer + explainer = Explainer(method=explainer_name, model=model, param_dict=explainer_params) + + # Get predictions + predicted_labels = model(test_input).detach().argmax(dim=1) + + # Create data loader + dataset = TensorDataset(test_input, predicted_labels) + data_loader = DataLoader(dataset, batch_size=batch_size, shuffle=False) + + # Generate explanations + logger.info("Generating explanations...") + all_explanations = [] + for batch_inputs, batch_labels in tqdm(data_loader, desc="Explaining batches"): + batch_explanations = explainer.get_explanations(batch_inputs, label=batch_labels) + all_explanations.append(batch_explanations) + + # Combine batches + combined_explanations = torch.cat(all_explanations, dim=0) + + # Aggregate categorical features + processed_explanations = [] + for feature_name, feature_info in feature_metadata.items(): + if feature_info['type'] == 'categorical': + feature_index = feature_info['index'] + onehot_encoded = test_input[:, feature_index] + explanation_values = combined_explanations[:, feature_index] + categorical_explanation = (onehot_encoded * explanation_values).sum(dim=1) + processed_explanations.append(categorical_explanation) + else: + feature_index = feature_info['index'] + processed_explanations.append(combined_explanations[:, feature_index]) + + explanations = torch.stack(processed_explanations, dim=1).detach().numpy() + return explanations + + +class PyTorchModelWrapper: + """Wrapper for PyTorch models to add predict and predict_proba methods for AutoXAI.""" + + def __init__(self, pytorch_model): + self.model = pytorch_model + self.model.eval() + + def predict(self, X): + """Predict class labels for samples.""" + if not isinstance(X, torch.Tensor): + X = torch.FloatTensor(X) + + with torch.no_grad(): + logits = self.model(X) + predictions = torch.argmax(logits, dim=1) + + return predictions.cpu().numpy() + + def predict_proba(self, X): + """Predict class probabilities for samples.""" + if not isinstance(X, torch.Tensor): + X = torch.FloatTensor(X) + + with torch.no_grad(): + logits = self.model(X) + probas = torch.softmax(logits, dim=1) + + return probas.cpu().numpy() + + +def explain_with_autoxai( + model: nn.Module, + X_test: np.ndarray, + y_test: np.ndarray, + explainer_name: str, + config: Dict[str, Any], + logger: logging.Logger, + raw_data, + batch_size: int = 32, + n_samples: int = 64, +) -> np.ndarray: + """Generate explanations using AutoXAI framework.""" + import glob + + # Add AutoXAI virtual environment to sys.path + autoxai_venv = "/opt/autoxai_venv" + if 
os.path.exists(autoxai_venv): + site_packages = glob.glob(f"{autoxai_venv}/lib/python*/site-packages") + if site_packages: + sys.path.insert(0, site_packages[0]) + + autoxai_path = os.path.join(os.path.dirname(__file__), "lib", "AutoXAI") + sys.path.insert(0, autoxai_path) + from hyperparameters_optimization import get_parameters + from XAI_solutions import set_up_explainer, get_local_exp + + logger.info(f"Setting up AutoXAI framework with {explainer_name}...") + + AUTOXAI_NAME_MAP = {"shap": "SHAP", "lime": "LIME"} + autoxai_nm = AUTOXAI_NAME_MAP[explainer_name] + + bg_size = min(50, len(X_test)) + + # Prepare model for AutoXAI + if isinstance(model, TorchModelForXGBoost): + unwrapped_model = model.xgb_model + elif isinstance(model, nn.Module): + unwrapped_model = PyTorchModelWrapper(model) + else: + unwrapped_model = model + + # Setup context + properties_list = ["robustness", "fidelity", "conciseness"] + context = {} + rand_idx = np.random.randint(0, X_test.shape[0], bg_size) + context["X"] = X_test[rand_idx] + context["y"] = y_test[rand_idx] + context["feature_names"] = list(raw_data.columns) + context["verbose"] = False + context["task"] = "classification" + context["question"] = "Why" + context["session_id"] = f"_{bg_size}_wine" + context["scaling"] = "Std" + context["weights"] = [1, 2, 0.5] + context["distance"] = "cosine" + context["explanations"] = [] + context["model"] = unwrapped_model + context["ES"] = True + context["IS"] = True + + score_hist = { + "xai_sol": [], "epoch": [], "aggregated_score": [], + "parameters": [], "robustness": [], "scaled_robustness": [], + "fidelity": [], "scaled_fidelity": [], + "conciseness": [], "scaled_conciseness": [] + } + + # Get default parameters + logger.info("Preparing AutoXAI explainer with default parameters...") + default_parameters = get_parameters( + autoxai_nm, score_hist, "default", properties_list, context) + + # Setup explainer + context['explainer'] = set_up_explainer(autoxai_nm, default_parameters, context) + + # Generate explanations for all test samples + logger.info("Generating explanations...") + explanations = np.zeros_like(X_test) + for i in tqdm(range(len(X_test)), desc="Explaining"): + e = get_local_exp(autoxai_nm, X_test[i], default_parameters, context) + idx = default_parameters["most_influent_features"] + explanations[i, idx] = e + + return explanations + + +def evaluate_explanations( + explanations: np.ndarray, + model: nn.Module, + X_test: np.ndarray, + y_test: np.ndarray, + logger: logging.Logger, + batch_size: int = 32, +) -> Dict[str, np.ndarray]: + """Evaluate explanations using PnPXAI metrics.""" + from torch.utils.data import DataLoader, TensorDataset + from pnpxai import Experiment + from pnpxai.core.modality.modality import Modality + from pnpxai.explainers import KernelShap + from pnpxai.evaluator.metrics import AbPC, Complexity, Metric + + logger.info("Evaluating explanations...") + + # Create dataloader + test_dataset = TensorDataset( + torch.tensor(X_test, dtype=torch.float32), + torch.tensor(y_test, dtype=torch.long) + ) + test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False) + + # Create experiment for metrics + sample_batch = next(iter(test_loader)) + modality = Modality( + dtype=sample_batch[0].dtype, + ndims=sample_batch[0].dim(), + ) + + expr = Experiment( + model=model, + data=test_loader, + modality=modality, + target_input_keys=[0], + target_class_extractor=lambda outputs: outputs.argmax(-1), + label_key=-1, + ) + + # Add explainers + expr.explainers.add('kernel_shap', 
KernelShap) + + # Add metrics + expr.metrics.add('abpc', AbPC) + expr.metrics.add('cmpx', Complexity) + + # Compound metric + class CompoundMetric(Metric): + def __init__(self, model, cmpd_metrics, weights, explainer=None, + target_input_keys=None, additional_input_keys=None, output_modifier=None): + super().__init__(model, explainer, target_input_keys, additional_input_keys, output_modifier) + self.cmpd_metrics = cmpd_metrics + self.weights = weights + + def evaluate(self, inputs, targets, attrs): + values = torch.zeros(attrs.size(0)).to(attrs.device) + for weight, metric in zip(self.weights, self.cmpd_metrics): + values += weight * metric.set_explainer(self.explainer).evaluate(inputs, targets, attrs) + return values + + expr.metrics.add('cmpd', CompoundMetric) + + # Create dummy explainer for evaluation + dummy_explainer = expr.create_explainer('kernel_shap') + + # Evaluate each metric + results = {} + X_test_t = torch.tensor(X_test, dtype=torch.float32) + explanations_t = torch.tensor(explanations, dtype=torch.float32) + + for metric_name in ['abpc', 'cmpx']: + metric = expr.create_metric(metric_name) + metric_values = [] + + for i in range(len(X_test)): + inputs = {0: X_test_t[i].unsqueeze(0)} + targets = model(inputs[0]).argmax(-1) + attrs = explanations_t[i].unsqueeze(0) + + value = metric.set_explainer(dummy_explainer).evaluate(inputs, targets, attrs) + metric_values.append(value.item()) + + results[metric_name] = np.array(metric_values) + + # Compound metric + metric_options = { + 'cmpd_metrics': [expr.create_metric('abpc'), expr.create_metric('cmpx')], + 'weights': [0.7, -0.3] + } + cmpd_metric = expr.create_metric('cmpd', **metric_options) + cmpd_values = [] + + for i in range(len(X_test)): + inputs = {0: X_test_t[i].unsqueeze(0)} + targets = model(inputs[0]).argmax(-1) + attrs = explanations_t[i].unsqueeze(0) + + value = cmpd_metric.set_explainer(dummy_explainer).evaluate(inputs, targets, attrs) + cmpd_values.append(value.item()) + + results['cmpd'] = np.array(cmpd_values) + + # Log summary + for metric_name, values in results.items(): + logger.info(f" {metric_name.upper()}: {values.mean():.4f} ± {values.std():.4f}") + + return results + + +def run_single_explanation(framework: str, model_name: str, explainer: str, + model: nn.Module, X_test: np.ndarray, y_test: np.ndarray, + X_train: np.ndarray, feature_metadata: Dict, raw_data: pd.DataFrame, + config: Dict, logger: logging.Logger, + n_samples: int = 25) -> Tuple[np.ndarray, Dict]: + """Run a single explanation and evaluation.""" + logger.info(f"Generating {framework}/{model_name}/{explainer} explanations...") + + # Generate explanations based on framework + if framework == "pnpxai": + explanations = explain_with_pnpxai( + model, X_test, y_test, explainer, config, logger, + batch_size=32, n_samples=n_samples, n_steps=50, model_type=model_name + ) + elif framework == "captum": + explanations = explain_with_captum( + model, X_test, y_test, explainer, config, logger, n_samples + ) + elif framework == "omnixai": + explanations = explain_with_omnixai( + model, X_test, y_test, explainer, config, logger, + feature_metadata, raw_data, n_samples + ) + elif framework == "openxai": + explanations = explain_with_openxai( + model, X_test, y_test, X_train, explainer, config, logger, + feature_metadata, batch_size=32, n_samples=n_samples + ) + elif framework == "autoxai": + explanations = explain_with_autoxai( + model, X_test, y_test, explainer, config, logger, + raw_data, batch_size=32, n_samples=n_samples + ) + else: + raise 
ValueError(f"Unknown framework: {framework}") + + # Evaluate explanations + metrics = evaluate_explanations(explanations, model, X_test, y_test, logger, batch_size=32) + + return explanations, metrics + + +# ============================================================================ +# Experiment Runner +# ============================================================================ + +EXPERIMENTS = { + "pnpxai": { + "xgb": ["lime", "shap"], + "tab_resnet": ["lime", "shap", "grad", "itg", "ig", "sg", "vg", "lrp"], + }, + "captum": { + "xgb": ["lime", "shap"], + "tab_resnet": ["lime", "shap", "grad", "itg", "ig", "sg", "lrp"], + }, + "omnixai": { + "xgb": ["lime", "shap"], + }, + "openxai": { + "tab_resnet": ["lime", "shap", "grad", "itg", "ig", "sg"], + }, + "autoxai": { + "xgb": ["lime", "shap"], + "tab_resnet": ["lime", "shap"], + }, +} + + +def run_all_experiments(data_dir: str = "data/wine_quality", + config_dir: str = "experiments/configs/tabular", + results_dir: str = "results/wine_quality", + n_samples: int = 25, + seed: int = 42, + verbose: bool = False): + """Run all Wine Quality experiments and generate results table.""" + + # Setup logging + level = logging.INFO if verbose else logging.WARNING + logging.basicConfig(level=level, format='%(asctime)s - %(levelname)s - %(message)s') + logger = logging.getLogger(__name__) + + logger.info("="*70) + logger.info("Wine Quality XAI Experiment Runner") + logger.info("="*70) + + # Set seeds + set_seed(seed) + logger.info(f"Random seed: {seed}") + + # Load data + logger.info(f"Loading data from: {data_dir}") + X_train, X_test, y_train, y_test, feature_metadata, raw_data = get_winequality_dataset(data_dir) + logger.info(f"Data loaded: Train={len(X_train)}, Test={len(X_test)}, Features={X_test.shape[1]}") + + # Load configs + config_path = Path(config_dir) + with open(config_path / "explainer_config.yaml", 'r') as f: + explainer_config = yaml.safe_load(f) + with open(config_path / "optuna_config.yaml", 'r') as f: + optuna_config = yaml.safe_load(f) + + config = {'explainer': explainer_config, 'optuna': optuna_config} + + # Load models + models = {} + logger.info("Loading models...") + + xgb_model = get_winequality_model("xgb", Path(data_dir) / "xgb_model.json") + models["xgb"] = xgb_model + + resnet_model = get_winequality_model("tab_resnet", Path(data_dir) / "resnet_model.pth", + input_dim=X_train.shape[1], output_dim=2, num_blocks=1) + models["tab_resnet"] = resnet_model + + logger.info("Models loaded successfully") + + # Run all experiments + total_experiments = sum(len(explainers) for fw_models in EXPERIMENTS.values() + for explainers in fw_models.values()) + logger.info(f"Total experiments to run: {total_experiments}") + + completed = 0 + for framework, fw_models in EXPERIMENTS.items(): + for model_name, explainers in fw_models.items(): + for explainer in explainers: + completed += 1 + logger.info(f"\n[{completed}/{total_experiments}] {framework}/{model_name}/{explainer}") + + try: + # Validate combination + validate_explainer_args(framework, model_name, explainer) + + # Reset seeds for each experiment + set_seed(seed) + + # Run explanation + model = models[model_name] + explanations, metrics = run_single_explanation( + framework, model_name, explainer, model, + X_test, y_test, X_train, feature_metadata, raw_data, + config, logger, n_samples + ) + + # Save results + output_dir = Path(results_dir) / model_name / framework / explainer + output_dir.mkdir(parents=True, exist_ok=True) + + np.save(output_dir / "explanations.npy", explanations) 
+ np.save(output_dir / "abpc.npy", metrics['abpc']) + np.save(output_dir / "cmpx.npy", metrics['cmpx']) + np.save(output_dir / "cmpd.npy", metrics['cmpd']) + + logger.info(f"✓ Saved to {output_dir}") + + except Exception as e: + logger.error(f"✗ Failed: {e}") + if verbose: + import traceback + traceback.print_exc() + continue + + logger.info("\n" + "="*70) + logger.info("All experiments completed!") + logger.info("="*70) + + +# ============================================================================ +# LaTeX Table Generation +# ============================================================================ + +def collect_results(results_dir="results/wine_quality"): + """Collect all experiment results.""" + results_path = Path(results_dir) + data = defaultdict(lambda: defaultdict(lambda: defaultdict(dict))) + + for root, dirs, files in os.walk(results_path): + if "explanations.npy" in files: + parts = Path(root).relative_to(results_path).parts + if len(parts) != 3: + continue + + model, framework, explainer = parts + + try: + abpc = np.load(os.path.join(root, "abpc.npy")).mean() + cmpx = np.load(os.path.join(root, "cmpx.npy")).mean() + cmpd = np.load(os.path.join(root, "cmpd.npy")).mean() + + data[model][framework][explainer] = { + 'faithfulness': abpc, + 'complexity': cmpx, + 'composite': cmpd + } + except Exception as e: + print(f"Error loading {root}: {e}") + continue + + return data + + +def format_value(value, best_value, is_complexity=False): + """Format value with bold if it's the best.""" + if value is None: + return "-" + + is_best = abs(value - best_value) < 1e-6 if best_value is not None else False + formatted = f"{value:.4f}" + return f"\\textbf{{{formatted}}}" if is_best else formatted + + +def get_best_value(values, is_complexity=False): + """Get the best value from a list.""" + valid_values = [v for v in values if v is not None] + if not valid_values: + return None + + return min(valid_values) if is_complexity else max(valid_values) + + +def generate_latex_table(data): + """Generate LaTeX table matching the original format.""" + + EXPLAINER_MAP = { + "shap": "KernelSHAP", + "lime": "LIME", + "grad": "Gradient", + "itg": "Grad.$\\times$Input", + "ig": "Integrated Gradients", + "sg": "SmoothGrad", + "vg": "VarGrad", + "lrp": "LRP" + } + + MODEL_MAP = { + "xgb": "XGBoost", + "tab_resnet": "ResNet" + } + + FRAMEWORK_ORDER = ["captum", "omnixai", "autoxai", "openxai", "pnpxai"] + + XGB_EXPLAINERS = ["shap", "lime"] + RESNET_EXPLAINERS = ["shap", "lime", "grad", "itg", "ig", "sg", "vg", "lrp"] + + lines = [] + lines.append("\\begin{table}[!th]") + lines.append(" \\caption{\\textbf{Comparison of explanation performance on Wine Quality dataset.}") + lines.append(" Evaluation of XGBoost and TabResNet models across three key metrics:") + lines.append(" Faithfulness (higher is better $\\uparrow$), Complexity (lower is better $\\downarrow$), and a Composite [Faithfulness, Simplicity] score ($\\uparrow$).") + lines.append(" The table compares PnP-XAI against Captum, AutoXAI, OmniXAI, and OpenXAI.") + lines.append(" Bold values indicate the best score per row; dashes (-) denote unsupported combinations.}") + lines.append(" \\label{tab:wine_performance}") + lines.append(" \\centering") + lines.append(" \\resizebox{\\textwidth}{!}{%") + lines.append(" \\begin{tabular}{lll||cccc|c}") + lines.append(" \\toprule") + lines.append(" \\textbf{Model} & \\textbf{Metric} & \\textbf{Explainer} & \\textbf{Captum} & \\textbf{OmniXAI} & \\textbf{AutoXAI} & \\textbf{OpenXAI} & \\textbf{PnPXAI} \\\\") + 
lines.append(" \\midrule") + + # Process XGBoost + model_key = "xgb" + model_name = MODEL_MAP[model_key] + + for metric_idx, (metric_key, metric_name, is_complexity) in enumerate([ + ('faithfulness', 'Faithfulness ($\\uparrow$)', False), + ('complexity', 'Complexity ($\\downarrow$)', True), + ('composite', 'Composite [Faithfulness, Simplicity] ($\\uparrow$)', False) + ]): + if metric_idx == 0: + lines.append(f" {model_name} & \\multirow[t]{{2}}{{*}}{{{metric_name}}}") + else: + lines.append(f" & \\multirow[t]{{2}}{{*}}{{{metric_name}}}") + + for exp_idx, exp_key in enumerate(XGB_EXPLAINERS): + exp_name = EXPLAINER_MAP[exp_key] + + values = [] + for fw in FRAMEWORK_ORDER: + if fw in data[model_key] and exp_key in data[model_key][fw]: + values.append(data[model_key][fw][exp_key][metric_key]) + else: + values.append(None) + + best_val = get_best_value(values, is_complexity) + formatted_values = [format_value(v, best_val, is_complexity) for v in values] + + if exp_idx == 0: + lines.append(f" & {exp_name} & {' & '.join(formatted_values)} \\\\") + else: + lines.append(f" & & {exp_name} & {' & '.join(formatted_values)} \\\\") + + if metric_idx < 2: + lines.append(" \\cmidrule{2-8}") + + lines.append(" \\midrule") + + # Process ResNet + model_key = "tab_resnet" + model_name = MODEL_MAP[model_key] + + for metric_idx, (metric_key, metric_name, is_complexity) in enumerate([ + ('faithfulness', 'Faithfulness ($\\uparrow$)', False), + ('complexity', 'Complexity ($\\downarrow$)', True), + ('composite', 'Composite [Faithfulness, Simplicity] ($\\uparrow$)', False) + ]): + if metric_idx == 0: + lines.append(f" {model_name} & \\multirow[t]{{{len(RESNET_EXPLAINERS)}}}{{*}}{{{metric_name}}}") + else: + lines.append(f" & \\multirow[t]{{{len(RESNET_EXPLAINERS)}}}{{*}}{{{metric_name}}}") + + for exp_idx, exp_key in enumerate(RESNET_EXPLAINERS): + exp_name = EXPLAINER_MAP[exp_key] + + values = [] + for fw in FRAMEWORK_ORDER: + if fw in data[model_key] and exp_key in data[model_key][fw]: + values.append(data[model_key][fw][exp_key][metric_key]) + else: + values.append(None) + + best_val = get_best_value(values, is_complexity) + formatted_values = [format_value(v, best_val, is_complexity) for v in values] + + if exp_idx == 0: + lines.append(f" & {exp_name:20s} & {' & '.join(formatted_values)} \\\\") + else: + lines.append(f" & & {exp_name:20s} & {' & '.join(formatted_values)} \\\\") + + if metric_idx < 2: + lines.append(" \\cmidrule{2-8}") + + lines.append(" \\bottomrule") + lines.append(" \\end{tabular}%") + lines.append(" }") + lines.append("\\end{table}") + + return "\n".join(lines) + + +# ============================================================================ +# Main Function +# ============================================================================ + +def main(): + parser = argparse.ArgumentParser( + description="Run Wine Quality XAI experiments and generate results table", + formatter_class=argparse.RawDescriptionHelpFormatter + ) + + parser.add_argument("--n_samples", type=int, default=25, + help="Number of samples for LIME/SHAP (default: 25)") + parser.add_argument("--seed", type=int, default=42, + help="Random seed (default: 42)") + parser.add_argument("--verbose", action="store_true", + help="Enable verbose logging") + parser.add_argument("--data_dir", type=str, default="data/wine_quality", + help="Data directory (default: data/wine_quality)") + parser.add_argument("--config_dir", type=str, default="experiments/configs/tabular", + help="Config directory (default: experiments/configs/tabular)") + 
parser.add_argument("--results_dir", type=str, default="results/wine_quality", + help="Results directory (default: results/wine_quality)") + + args = parser.parse_args() + + start_time = time.time() + + # Run all experiments + print("\n" + "="*70) + print("Starting Wine Quality XAI Experiment") + print("="*70 + "\n") + + run_all_experiments( + data_dir=args.data_dir, + config_dir=args.config_dir, + results_dir=args.results_dir, + n_samples=args.n_samples, + seed=args.seed, + verbose=args.verbose + ) + + # Generate LaTeX table + print("\n" + "="*70) + print("Generating LaTeX table...") + print("="*70 + "\n") + + data = collect_results(args.results_dir) + latex_table = generate_latex_table(data) + + # Save table to results directory + output_file = Path(args.results_dir) / "experiment_result.md" + output_file.parent.mkdir(parents=True, exist_ok=True) + + with open(output_file, 'w') as f: + f.write(latex_table + "\n") + + print(latex_table) + print(f"\n✓ Table saved to: {output_file}") + + elapsed = time.time() - start_time + minutes = int(elapsed // 60) + seconds = int(elapsed % 60) + + print("\n" + "="*70) + print(f"All tasks completed in {minutes}m {seconds}s") + print("="*70 + "\n") + + +if __name__ == "__main__": + main() diff --git a/experiments/utils/__init__.py b/experiments/utils/__init__.py new file mode 100644 index 0000000..10315c1 --- /dev/null +++ b/experiments/utils/__init__.py @@ -0,0 +1,73 @@ +from .utils import ( + set_seed, + set_params, + patch_lrp_explainer, + save_params_csv, + save_pickle_data, + load_pickle_data, + img_to_np, + denormalize_image, + load_model_and_dataloader_for_tutorial +) + +from .datasets import ( + ImageNetDataset, + get_imagenet_dataset, + ImageNetValDataset, + get_imagenet_val_dataset, + IMDBDataset, + get_imdb_dataset, + VQADataset, + get_vqa_dataset, + get_livertumor_dataset, + get_livertumor_dataset_from_hf, + get_imagenet_sample_from_hf, + get_aki_dataset, + get_ecg_dataset_from_hf, + get_winequality_dataset, + winequality_transform, + winequality_invert_transform, + winequality_find_idx, +) + +from .models import ( + get_torchvision_model, + get_livertumor_model, + get_livertumor_model_from_hf, + Bert, + get_bert_model, + get_bert_tokenizer, + bert_collate_fn, + Vilt, + get_vilt_model, + get_vilt_processor, + vilt_collate_fn, + get_aki_model_from_hf, + get_ecg_resnet_from_hf, + get_ecg_patchtst_from_hf, + TorchModelForXGBoost, + get_winequality_model, +) + +__all__ = [ + # utils + 'set_seed', 'set_params', 'patch_lrp_explainer', 'save_params_csv', 'save_pickle_data', 'load_pickle_data', + 'img_to_np', 'denormalize_image', 'load_model_and_dataloader_for_tutorial', + + # datasets + 'ImageNetDataset', 'get_imagenet_dataset', + 'ImageNetValDataset', 'get_imagenet_val_dataset', + 'IMDBDataset', 'get_imdb_dataset', + 'VQADataset', 'get_vqa_dataset', + 'get_livertumor_dataset', 'get_livertumor_dataset_from_hf', + 'get_imagenet_sample_from_hf', + 'get_aki_dataset', 'get_ecg_dataset_from_hf', + 'get_winequality_dataset', 'winequality_transform', 'winequality_invert_transform', 'winequality_find_idx', + + # models + 'get_torchvision_model', 'get_livertumor_model', 'get_livertumor_model_from_hf', + 'Bert', 'get_bert_model', 'bert_collate_fn', 'get_bert_tokenizer', + 'Vilt', 'get_vilt_model', 'get_vilt_processor', 'vilt_collate_fn', + 'get_aki_model_from_hf', 'get_ecg_resnet_from_hf', 'get_ecg_patchtst_from_hf', + 'TorchModelForXGBoost', 'get_winequality_model', +] \ No newline at end of file diff --git a/experiments/utils/datasets.py 
b/experiments/utils/datasets.py new file mode 100644 index 0000000..4e66b96 --- /dev/null +++ b/experiments/utils/datasets.py @@ -0,0 +1,505 @@ +import os +import pickle +import json +import requests +from collections import defaultdict, deque +from typing import Optional, List, Tuple, Dict, Any +from tqdm import tqdm +from io import BytesIO +from PIL import Image +from pathlib import Path +from urllib3 import disable_warnings +from urllib3.exceptions import InsecureRequestWarning + +import numpy as np +import pandas as pd +from sklearn.preprocessing import StandardScaler, LabelEncoder + +import torch +from torch.utils.data import Dataset, Subset, DataLoader, TensorDataset +from torchvision import transforms +from huggingface_hub import hf_hub_download +from datasets import load_dataset + +from experiments.datasets import ( + LiverTumorDataset, + LiverTumorDatasetHf, + AKIDataset, + AKI_COLUMNS, +) + + +class ImageNetDataset(Dataset): + def __init__(self, root_dir, transform=None): + self.root_dir = root_dir + self.img_dir = os.path.join(self.root_dir, "samples/") + self.label_dir = os.path.join(self.root_dir, "imagenet_class_index.json") + + with open(self.label_dir) as json_data: + self.idx_to_labels = json.load(json_data) + + self.img_names = os.listdir(self.img_dir) + self.img_names.sort() + + self.transform = transform + + def __len__(self): + return len(self.img_names) + + def __getitem__(self, idx): + img_path = os.path.join(self.img_dir, self.img_names[idx]) + image = Image.open(img_path).convert("RGB") + label = idx + + if self.transform: + image = self.transform(image) + + return image, label + + def idx_to_label(self, idx): + return self.idx_to_labels[str(idx)][1] + + +def get_imagenet_dataset( + transform, + subset_size: int = 100, # ignored if indices is not None + root_dir="./data/ImageNet", + indices: Optional[List[int]] = None, +): + os.chdir(Path(__file__).parent) # ensure path + dataset = ImageNetDataset(root_dir=root_dir, transform=transform) + if indices is not None: + return Subset(dataset, indices=indices) + indices = list(range(len(dataset))) + subset = Subset(dataset, indices=indices[:subset_size]) + return subset + + +class ImageNetValDataset(Dataset): + def __init__(self, img_dir, label_file, class_index_file, transform=None): + self.img_dir = img_dir + self.transform = transform + self.image_files = sorted( + [f for f in os.listdir(img_dir) if f.endswith(".JPEG")] + ) + + with open(label_file, "r") as f: + self.labels = [line.strip() for line in f] + + with open(class_index_file, "r") as f: + self.class_index = json.load(f) + self.synset_to_idx = {v[0]: int(k) for k, v in self.class_index.items()} + + def __len__(self): + return len(self.image_files) + + def __getitem__(self, idx): + img_name = self.image_files[idx] + img_path = os.path.join(self.img_dir, img_name) + image = Image.open(img_path).convert("RGB") + label_synset = self.labels[idx].split(" ")[-1] + label = self.synset_to_idx[label_synset] + + if self.transform: + image = self.transform(image) + + return image, label + + +def get_imagenet_val_dataset(transform, root_dir): + img_dir = os.path.join(root_dir, "ImageNet1k", "val", "val") + info_dir = os.path.join(root_dir, "ImageNet1k_info") + val_label_file = os.path.join(info_dir, "ImageNet_val_label.txt") + val_class_index_file = os.path.join(info_dir, "ImageNet_class_index.json") + dataset = ImageNetValDataset( + img_dir, val_label_file, val_class_index_file, transform + ) + return dataset + + +class IMDBDataset(Dataset): + def __init__(self, 
split="test"): + super().__init__() + # data_iter = IMDB(split=split) + # self.annotations = [(line, label-1) for label, line in tqdm(data_iter)] + + def __len__(self): + return len(self.annotations) + + def __getitem__(self, idx): + return self.annotations[idx] + + +def get_imdb_dataset(split="test"): + return IMDBDataset(split=split) + + +disable_warnings(InsecureRequestWarning) + + +class VQADataset(Dataset): + def __init__(self): + super().__init__() + res = requests.get("https://visualqa.org/balanced_data.json") + self.annotations = eval(res.text) + + def __len__(self): + return len(self.annotations) + + def __getitem__(self, idx): + data = self.annotations[idx] + if isinstance(data["original_image"], str): + print(f"Requesting {data['original_image']}...") + res = requests.get(data["original_image"], verify=False) + img = Image.open(BytesIO(res.content)).convert("RGB") + data["original_image"] = img + return data["original_image"], data["question"], data["original_answer"] + + +def get_vqa_dataset(): + return VQADataset() + + +def get_livertumor_dataset( + transform, + subset_size: int = 100, # ignored if indices is not None + root_dir="./data/LiverTumor", + indices: Optional[List[int]] = None, +): + dataset = LiverTumorDataset(data_dir=root_dir, transform=transform) + if indices is not None: + return Subset(dataset, indices=indices) + indices = list(range(len(dataset))) + subset = Subset(dataset, indices=indices[:subset_size]) + return subset + + +def get_livertumor_dataset_from_hf( + transform, + hf_repo_id: str = "seongun/liver-tumor-classification", + indices: Optional[List[int]] = None, + data_root: str = "./data", + cache_dir: Optional[str] = None, +): + """ + Downloads only necessary files (metadata + images for requested indices) + from Hugging Face Hub using hf_hub_download and creates a PyTorch Dataset. + + Args: + transform: Torchvision transforms to apply to the image-like data. + hf_repo_id (str): Repository ID of the dataset on Hugging Face Hub. + indices (Optional[List[int]]): Absolute indices to select/download. + data_root (str): The root directory within the project to store datasets. + cache_dir (Optional[str]): Path to HF cache (used for intermediate downloads). + + Returns: + A PyTorch Dataset containing only the data for the requested indices. + """ + if indices is None: + print( + "Warning: No indices provided. Attempting to load metadata only, but image loading might fail later or be inefficient." + ) + + dataset_local_dir = os.path.join(data_root, hf_repo_id.replace("/", "_")) + os.makedirs(dataset_local_dir, exist_ok=True) + + print(f"Downloading metadata for '{hf_repo_id}' from Hugging Face Hub...") + try: + # 1. Download metadata.jsonl only + metadata_local_path = hf_hub_download( + repo_id=hf_repo_id, + filename="metadata.jsonl", + repo_type="dataset", + local_dir=dataset_local_dir, + local_dir_use_symlinks=True, + cache_dir=cache_dir, + ) + base_download_dir = dataset_local_dir + print(f"Metadata available at: {metadata_local_path}") + print(f"Base download/cache directory: {base_download_dir}") + + except Exception as e: + print(f"Failed to download metadata.jsonl from Hugging Face Hub: {e}") + raise e + + # 2. 
Read metadata and filter for requested indices + filtered_metadata = [] + required_image_paths = set() # Use set to avoid duplicate downloads + all_metadata = [] + try: + with open(metadata_local_path, "r") as f: + all_metadata = [json.loads(line.strip()) for line in f] + + if indices is not None: + num_total = len(all_metadata) + for idx in indices: + if 0 <= idx < num_total: + entry = all_metadata[idx] + filtered_metadata.append(entry) + required_image_paths.add(entry["sample_path"]) + required_image_paths.add(entry["w_sample_path"]) + required_image_paths.add(entry["mask_path"]) + else: + print( + f"Warning: Requested index {idx} is out of range (0-{num_total-1}). Skipping." + ) + else: + print( + "Warning: Loading without specific indices. Using all metadata entries." + ) + filtered_metadata = ( + all_metadata # Less efficient if not all images are needed later + ) + + except Exception as e: + print(f"Error reading or processing metadata file {metadata_local_path}: {e}") + raise e + + if not filtered_metadata: + raise ValueError("No valid metadata found for the requested indices.") + + # 3. Download only the required images + print( + f"Downloading {len(required_image_paths)} required image files (if not cached)..." + ) + for img_rel_path in tqdm(list(required_image_paths), desc="Downloading images"): + try: + # hf_hub_download will download to the cache or find existing file + hf_hub_download( + repo_id=hf_repo_id, + filename=img_rel_path, + repo_type="dataset", + local_dir=dataset_local_dir, + local_dir_use_symlinks=True, + cache_dir=cache_dir, + ) + except Exception as e: + print(f"Warning: Failed to download image file {img_rel_path}: {e}") + + print("Required image files downloaded/cached.") + + # 4. Create and return the Dataset using filtered metadata and base download dir + dataset = LiverTumorDatasetHf( + metadata=filtered_metadata, + base_download_dir=base_download_dir, + transform=transform, + ) + + print(f"Created dataset with {len(dataset)} instances.") + return dataset + + +def get_imagenet_sample_from_hf( + transform, + hf_repo_id: str = "geonhyeongkim/imagenet-samples-for-pnpxai-experiments", + indices: Optional[List[int]] = None, + data_root: str = "./data", + cache_dir: Optional[str] = None, +): + """ + Downloads only necessary files (metadata + images for requested indices) + from Hugging Face Hub using hf_hub_download and creates a PyTorch Dataset. + + Args: + transform: Torchvision transforms to apply to the image-like data. + hf_repo_id (str): Repository ID of the dataset on Hugging Face Hub. + indices (Optional[List[int]]): Absolute indices to select/download. + data_root (str): The root directory within the project to store datasets. + cache_dir (Optional[str]): Path to HF cache (used for intermediate downloads). + + Returns: + A PyTorch Dataset containing only the data for the requested indices. + """ + if indices is None: + print( + "Warning: No indices provided. Attempting to load metadata only, but image loading might fail later or be inefficient." + ) + + dataset_local_dir = os.path.join(data_root, hf_repo_id.replace("/", "_")) + os.makedirs(dataset_local_dir, exist_ok=True) + + print(f"Downloading metadata for '{hf_repo_id}' from Hugging Face Hub...") + try: + # 1. 
Download metadata.jsonl only + metadata_local_path = hf_hub_download( + repo_id=hf_repo_id, + filename="imagenet_class_index.json", + repo_type="dataset", + local_dir=dataset_local_dir, + local_dir_use_symlinks=True, + cache_dir=cache_dir, + ) + base_download_dir = dataset_local_dir + print(f"Metadata available at: {metadata_local_path}") + print(f"Base download/cache directory: {base_download_dir}") + + except Exception as e: + print( + f"Failed to download imagenet_class_index.json from Hugging Face Hub: {e}" + ) + raise e + + # 2. Read metadata and filter for requested indices + filtered_metadata = {} + required_image_paths = set() # Use set to avoid duplicate downloads + all_metadata = {} + try: + with open(metadata_local_path, "r") as f: + all_metadata = json.load(f) + + if indices is not None: + num_total = len(all_metadata) + for idx in indices: + if 0 <= idx < num_total: + metadata = all_metadata[str(idx)] + filtered_metadata[idx] = metadata + required_image_paths.add(f'samples/{"_".join(metadata)}.JPEG') + else: + print( + f"Warning: Requested index {idx} is out of range (0-{num_total-1}). Skipping." + ) + else: + print( + "Warning: Loading without specific indices. Using all metadata entries." + ) + filtered_metadata = { + int(k): all_metadata[k] for k in all_metadata + } # Less efficient if not all images are needed later + + except Exception as e: + print(f"Error reading or processing metadata file {metadata_local_path}: {e}") + raise e + + if not filtered_metadata: + raise ValueError("No valid metadata found for the requested indices.") + + # 3. Download only the required images + print( + f"Downloading {len(required_image_paths)} required image files (if not cached)..." + ) + for img_rel_path in tqdm(list(required_image_paths), desc="Downloading images"): + try: + # hf_hub_download will download to the cache or find existing file + hf_hub_download( + repo_id=hf_repo_id, + filename=img_rel_path, + repo_type="dataset", + local_dir=dataset_local_dir, + local_dir_use_symlinks=True, + cache_dir=cache_dir, + ) + except Exception as e: + print(f"Warning: Failed to download image file {img_rel_path}: {e}") + + print("Required image files downloaded/cached.") + + # 4. 
Create and return the Dataset using filtered metadata and base download dir + fp_img = os.path.join(base_download_dir, list(required_image_paths)[0]) + img = transform(Image.open(fp_img).convert("RGB")) + label = all_metadata[str(indices[0])][-1] + return img, label + + +def get_aki_dataset( + data_path: str = "data/mimiciii/formatted/data.csv", + test_split: float = 0.2, +) -> AKIDataset: + data = pd.read_csv(data_path) + + data = data.replace([np.inf, -np.inf], np.nan).dropna() + data = data[AKI_COLUMNS] + + scaler = StandardScaler() + scaler.fit(data.iloc[:, 2:]) + data.iloc[:, 2:] = scaler.transform(data.iloc[:, 2:]) + + n_entries = len(data) + df_test = data.iloc[-int(test_split * n_entries) :, :] + + x_data = df_test.drop(["AKI_STAGE_7DAY", "AKI"], axis=1).values + y_data = df_test["AKI_STAGE_7DAY"].values + + dataset = AKIDataset(x_data, y_data) + + return dataset + + +def get_ecg_dataset_from_hf(repo_id: str = "enver1323/ucr-twoleadecg") -> TensorDataset: + data = load_dataset(repo_id)["test"].with_format("numpy") + x_data = np.stack(data['segment']) + y_data = data['label'] + + encoder = LabelEncoder() + y_data = encoder.fit_transform(y_data) + + return TensorDataset( + torch.from_numpy(x_data), + torch.from_numpy(y_data) + ) + + +def get_winequality_dataset(data_dir: str = "data/wine_quality") -> Tuple: + """Load Wine Quality dataset and feature metadata.""" + data_path = Path(data_dir) + + X_train = np.load(data_path / "X_train.npy") + X_test = np.load(data_path / "X_test.npy") + y_train = np.load(data_path / "y_train.npy") + y_test = np.load(data_path / "y_test.npy") + + with open(data_path / "feature_metadata.pkl", "rb") as f: + feature_metadata = pickle.load(f) + + raw_data = pd.read_csv(data_path / "raw_data.csv") + + return X_train, X_test, y_train, y_test, feature_metadata, raw_data + + +def winequality_transform(X: pd.DataFrame, feature_metadata: Dict[str, Any]) -> np.ndarray: + """Transform raw data using feature metadata encoders.""" + input_data = [] + for k, v in feature_metadata.items(): + if np.isin('missing', X[[k]].values): + X[[k]] = X[[k]].replace("missing", v['encoder'].categories_[0][-1]) + preprocessed = v['encoder'].transform(X[[k]].values) + if v['type'] == 'categorical': + preprocessed = preprocessed.toarray() + input_data.append(preprocessed) + + input_array = np.concatenate(input_data, axis=1) + return input_array + + +def winequality_invert_transform(input_array: np.ndarray, feature_metadata: Dict[str, Any]) -> pd.DataFrame: + """Invert transformed data back to original feature space.""" + inverted_data = {} + + for col, meta in feature_metadata.items(): + if meta['type'] == 'categorical': + start_idx, end_idx = meta['index'][0], meta['index'][-1] + 1 + cat_data = input_array[:, start_idx:end_idx] + inverted_col = meta['encoder'].inverse_transform(cat_data) + inverted_data[col] = inverted_col.flatten() + else: + idx = meta['index'] + num_data = input_array[:, idx].reshape(-1, 1) + inverted_col = meta['encoder'].inverse_transform(num_data) + inverted_data[col] = inverted_col.flatten() + + return pd.DataFrame(inverted_data) + + +def winequality_find_idx(a: list, b: list) -> list: + """Find permutation index where a[idx] = b.""" + if sorted(a) != sorted(b): + return None + + pos_map = defaultdict(deque) + for i, val in enumerate(a): + pos_map[val].append(i) + + idx = [] + for val in b: + idx.append(pos_map[val].popleft()) + + return idx \ No newline at end of file diff --git a/experiments/utils/helpers.py b/experiments/utils/helpers.py deleted file mode 
100644 index 6c93aa0..0000000 --- a/experiments/utils/helpers.py +++ /dev/null @@ -1,535 +0,0 @@ -from typing import Optional, List -import os -import json -import pickle -import dill -import requests -import functools -from tqdm import tqdm -from collections import OrderedDict -from io import BytesIO -from pathlib import Path -from urllib3 import disable_warnings -from urllib3.exceptions import InsecureRequestWarning - -import torch -import torchvision -from torch import Tensor -from torch.utils.data import Dataset, Subset, DataLoader -from torchvision import transforms -from transformers import BertTokenizer, BertForSequenceClassification -from transformers import ViltForQuestionAnswering, ViltProcessor -# from datasets import load_dataset -from huggingface_hub import hf_hub_download - -from experiments.models import ResNet50LiverTumor -from experiments.datasets import LiverTumorDataset, LiverTumorDatasetHf - -from PIL import Image - -import pdb - -# datasets - -class ImageNetDataset(Dataset): - def __init__(self, root_dir, transform=None): - self.root_dir = root_dir - self.img_dir = os.path.join(self.root_dir, 'samples/') - self.label_dir = os.path.join( - self.root_dir, 'imagenet_class_index.json') - - with open(self.label_dir) as json_data: - self.idx_to_labels = json.load(json_data) - - self.img_names = os.listdir(self.img_dir) - self.img_names.sort() - - self.transform = transform - - def __len__(self): - return len(self.img_names) - - def __getitem__(self, idx): - img_path = os.path.join(self.img_dir, self.img_names[idx]) - image = Image.open(img_path).convert('RGB') - label = idx - - if self.transform: - image = self.transform(image) - - return image, label - - def idx_to_label(self, idx): - return self.idx_to_labels[str(idx)][1] - - -def get_imagenet_dataset( - transform, - subset_size: int = 100, # ignored if indices is not None - root_dir="./data/ImageNet", - indices: Optional[List[int]] = None, -): - os.chdir(Path(__file__).parent) # ensure path - dataset = ImageNetDataset(root_dir=root_dir, transform=transform) - if indices is not None: - return Subset(dataset, indices=indices) - indices = list(range(len(dataset))) - subset = Subset(dataset, indices=indices[:subset_size]) - return subset - - -class ImageNetValDataset(Dataset): - def __init__(self, img_dir, label_file, class_index_file, transform=None): - self.img_dir = img_dir - self.transform = transform - self.image_files = sorted([f for f in os.listdir(img_dir) if f.endswith('.JPEG')]) - - with open(label_file, 'r') as f: - self.labels = [line.strip() for line in f] - - with open(class_index_file, 'r') as f: - self.class_index = json.load(f) - self.synset_to_idx = {v[0]: int(k) for k, v in self.class_index.items()} - - def __len__(self): - return len(self.image_files) - - def __getitem__(self, idx): - img_name = self.image_files[idx] - img_path = os.path.join(self.img_dir, img_name) - image = Image.open(img_path).convert('RGB') - label_synset = self.labels[idx].split(' ')[-1] - label = self.synset_to_idx[label_synset] - - if self.transform: - image = self.transform(image) - - return image, label - - -def get_imagenet_val_dataset(transform, root_dir): - img_dir = os.path.join(root_dir, 'ImageNet1k', 'val', 'val') - info_dir = os.path.join(root_dir, 'ImageNet1k_info') - val_label_file = os.path.join(info_dir, 'ImageNet_val_label.txt') - val_class_index_file = os.path.join(info_dir, 'ImageNet_class_index.json') - dataset = ImageNetValDataset(img_dir, val_label_file, val_class_index_file, transform) - return dataset - - -def 
save_params_csv(params, filepath='params.csv'): - """Saves the best parameters to a CSV file after optimization. - - Args: - params (dict): A dictionary containing the parameter names and values. - filepath (str, optional): The path to the CSV file. Defaults to 'params.csv'. - """ - try: - with open(filepath, 'w', newline='') as csvfile: - writer = csv.writer(csvfile) - # Write header row - writer.writerow(['parameter', 'value']) - # Write parameter name and value rows - for key, value in params.items(): - writer.writerow([key, value]) - print(f"Best parameters saved to: {filepath}") - except Exception as e: - print(f"Error saving best parameters to CSV: {e}") - - -def save_pickle_data(data, filepath='data.pkl'): - """Saves the data to a pickle file. - - Args: - data: A data variable to pickle. - filepath (str, optional): The path to the pickle file. Defaults to 'data.pkl'. - """ - try: - with open(filepath, 'wb') as f: - dill.dump(data, f) - print(f"Data saved to: {filepath}") - except Exception as e: - print(f"Error saving data to pickle file: {e}") - - -def load_pickle_data(filepath='data.pkl'): - """Loads the data from a pickle file. - - Args: - filepath (str, optional): The path to the pickle file. Defaults to 'data.pkl'. - - Returns: - Any: The loaded data, or None if the file is not found or an error occurs. - """ - try: - if os.path.exists(filepath): - with open(filepath, 'rb') as f: - data = dill.load(f) - print(f"data loaded from: {filepath}") - return data - else: - print(f"Warning: Pickle file not found at {filepath}") - return None - except Exception as e: - print(f"Error loading data from pickle file: {e}") - return None - - -class IMDBDataset(Dataset): - def __init__(self, split='test'): - super().__init__() - # data_iter = IMDB(split=split) - # self.annotations = [(line, label-1) for label, line in tqdm(data_iter)] - - def __len__(self): - return len(self.annotations) - - def __getitem__(self, idx): - return self.annotations[idx] - - -def get_imdb_dataset(split='test'): - return IMDBDataset(split=split) - -disable_warnings(InsecureRequestWarning) - - -class VQADataset(Dataset): - def __init__(self): - super().__init__() - res = requests.get('https://visualqa.org/balanced_data.json') - self.annotations = eval(res.text) - - def __len__(self): - return len(self.annotations) - - def __getitem__(self, idx): - data = self.annotations[idx] - if isinstance(data['original_image'], str): - print(f"Requesting {data['original_image']}...") - res = requests.get(data['original_image'], verify=False) - img = Image.open(BytesIO(res.content)).convert('RGB') - data['original_image'] = img - return data['original_image'], data['question'], data['original_answer'] - - -def get_vqa_dataset(): - return VQADataset() - - -def get_livertumor_dataset( - transform, - subset_size: int = 100, # ignored if indices is not None - root_dir="./data/LiverTumor", - indices: Optional[List[int]] = None, -): - dataset = LiverTumorDataset(data_dir=root_dir, transform=transform) - if indices is not None: - return Subset(dataset, indices=indices) - indices = list(range(len(dataset))) - subset = Subset(dataset, indices=indices[:subset_size]) - return subset - - -def get_livertumor_dataset_from_hf( - transform, - hf_repo_id: str = "seongun/liver-tumor-classification", - indices: Optional[List[int]] = None, - data_root: str = "./data", - cache_dir: Optional[str] = None, -): - """ - Downloads only necessary files (metadata + images for requested indices) - from Hugging Face Hub using hf_hub_download and creates a 
PyTorch Dataset. - - Args: - transform: Torchvision transforms to apply to the image-like data. - hf_repo_id (str): Repository ID of the dataset on Hugging Face Hub. - indices (Optional[List[int]]): Absolute indices to select/download. - data_root (str): The root directory within the project to store datasets. - cache_dir (Optional[str]): Path to HF cache (used for intermediate downloads). - - Returns: - A PyTorch Dataset containing only the data for the requested indices. - """ - if indices is None: - print("Warning: No indices provided. Attempting to load metadata only, but image loading might fail later or be inefficient.") - - dataset_local_dir = os.path.join(data_root, hf_repo_id.replace("/", "_")) - os.makedirs(dataset_local_dir, exist_ok=True) - - print(f"Downloading metadata for '{hf_repo_id}' from Hugging Face Hub...") - try: - # 1. Download metadata.jsonl only - metadata_local_path = hf_hub_download( - repo_id=hf_repo_id, - filename="metadata.jsonl", - repo_type="dataset", - local_dir=dataset_local_dir, - local_dir_use_symlinks=True, - cache_dir=cache_dir, - ) - base_download_dir = dataset_local_dir - print(f"Metadata available at: {metadata_local_path}") - print(f"Base download/cache directory: {base_download_dir}") - - except Exception as e: - print(f"Failed to download metadata.jsonl from Hugging Face Hub: {e}") - raise e - - # 2. Read metadata and filter for requested indices - filtered_metadata = [] - required_image_paths = set() # Use set to avoid duplicate downloads - all_metadata = [] - try: - with open(metadata_local_path, 'r') as f: - all_metadata = [json.loads(line.strip()) for line in f] - - if indices is not None: - num_total = len(all_metadata) - for idx in indices: - if 0 <= idx < num_total: - entry = all_metadata[idx] - filtered_metadata.append(entry) - required_image_paths.add(entry['sample_path']) - required_image_paths.add(entry['w_sample_path']) - required_image_paths.add(entry['mask_path']) - else: - print(f"Warning: Requested index {idx} is out of range (0-{num_total-1}). Skipping.") - else: - print("Warning: Loading without specific indices. Using all metadata entries.") - filtered_metadata = all_metadata # Less efficient if not all images are needed later - - except Exception as e: - print(f"Error reading or processing metadata file {metadata_local_path}: {e}") - raise e - - if not filtered_metadata: - raise ValueError("No valid metadata found for the requested indices.") - - # 3. Download only the required images - print(f"Downloading {len(required_image_paths)} required image files (if not cached)...") - for img_rel_path in tqdm(list(required_image_paths), desc="Downloading images"): - try: - # hf_hub_download will download to the cache or find existing file - hf_hub_download( - repo_id=hf_repo_id, - filename=img_rel_path, - repo_type="dataset", - local_dir=dataset_local_dir, - local_dir_use_symlinks=True, - cache_dir=cache_dir, - ) - except Exception as e: - print(f"Warning: Failed to download image file {img_rel_path}: {e}") - - print("Required image files downloaded/cached.") - - # 4. 
Create and return the Dataset using filtered metadata and base download dir - dataset = LiverTumorDatasetHf( - metadata=filtered_metadata, - base_download_dir=base_download_dir, - transform=transform - ) - - print(f"Created dataset with {len(dataset)} instances.") - return dataset - - -# models -def get_torchvision_model(model_name): - weights = torchvision.models.get_model_weights(model_name).DEFAULT - model = torchvision.models.get_model(model_name, weights=weights).eval() - transform = weights.transforms() - return model, transform - - -def get_livertumor_model(model_path): - model = ResNet50LiverTumor(in_channels=1, num_classes=2) - checkpoint = torch.load(model_path) - - state_dict = {k.replace('model.', '', 1).replace('module.', '', 1): v for k, v in checkpoint.items()} - model.load_state_dict(state_dict, strict=True) - model.eval() - - transform = transforms.Compose([ - transforms.ToTensor(), - transforms.Resize((224, 224), antialias=False), - ]) - - return model, transform - - -def get_livertumor_model_from_hf(repo_id="seongun/resnet50-livertumor"): - model = ResNet50LiverTumor.from_pretrained(repo_id) - model.eval() - - transform = transforms.Compose([ - transforms.ToTensor(), - transforms.Resize((224, 224), antialias=False), - ]) - - return model, transform - - -class Bert(BertForSequenceClassification): - def forward(self, input_ids, token_type_ids, attention_mask): - return super().forward( - input_ids=input_ids, - token_type_ids=token_type_ids, - attention_mask=attention_mask - ).logits - - -def get_bert_model(model_name, num_labels): - return Bert.from_pretrained(model_name, num_labels=num_labels) - - -class Vilt(ViltForQuestionAnswering): - def forward( - self, - pixel_values, - input_ids, - token_type_ids, - attention_mask, - pixel_mask, - ): - return super().forward( - input_ids=input_ids, - token_type_ids=token_type_ids, - attention_mask=attention_mask, - pixel_values=pixel_values, - pixel_mask=pixel_mask, - ).logits - - -def get_vilt_model(model_name): - return Vilt.from_pretrained(model_name) - - -# utils -def img_to_np(img): return img.permute(1, 2, 0).detach().numpy() - - -def denormalize_image(inputs, mean, std): - return img_to_np( - inputs - * Tensor(std)[:, None, None] - + Tensor(mean)[:, None, None] - ) - - -def bert_collate_fn(batch, tokenizer=None): - inputs = tokenizer( - [d[0] for d in batch], - padding=True, - truncation=True, - return_tensors='pt', - ) - labels = torch.tensor([d[1] for d in batch]) - return tuple(inputs.values()), labels - - -def get_bert_tokenizer(model_name): - return BertTokenizer.from_pretrained(model_name) - - -def get_vilt_processor(model_name): - return ViltProcessor.from_pretrained(model_name) - - -def vilt_collate_fn(batch, processor=None, label2id=None): - imgs = [d[0] for d in batch] - qsts = [d[1] for d in batch] - inputs = processor( - images=imgs, - text=qsts, - padding=True, - truncation=True, - return_tensors='pt', - ) - labels = torch.tensor([label2id[d[2]] for d in batch]) - return ( - inputs['pixel_values'], - inputs['input_ids'], - inputs['token_type_ids'], - inputs['attention_mask'], - inputs['pixel_mask'], - labels, - ) - - -def load_model_and_dataloader_for_tutorial(modality, device): - if modality == 'image': - model, transform = get_torchvision_model('resnet18') - model = model.to(device) - model.eval() - dataset = get_imagenet_dataset(transform) - loader = DataLoader(dataset, batch_size=8, shuffle=False) - return model, loader, transform - elif modality == 'text': - model = get_bert_model( - 
'fabriceyhc/bert-base-uncased-imdb', num_labels=2) - model = model.to(device) - model.eval() - dataset = get_imdb_dataset(split='test') - tokenizer = get_bert_tokenizer('fabriceyhc/bert-base-uncased-imdb') - loader = DataLoader( - dataset, - batch_size=8, - shuffle=False, - collate_fn=functools.partial(bert_collate_fn, tokenizer=tokenizer) - ) - return model, loader, tokenizer - elif modality == ('image', 'text'): - model = get_vilt_model('dandelin/vilt-b32-finetuned-vqa') - model.to(device) - model.eval() - dataset = get_vqa_dataset() - processor = get_vilt_processor('dandelin/vilt-b32-finetuned-vqa') - loader = DataLoader( - dataset, - batch_size=2, - shuffle=False, - collate_fn=functools.partial( - vilt_collate_fn, - processor=processor, - label2id=model.config.label2id, - ), - ) - return model, loader, processor - - -import random -import numpy as np - -def set_seed(seed): - """Sets the seed for various random number generators and CUDA settings for reproducibility.""" - seed = int(seed) - - # 1. Basic random libraries - random.seed(seed) - np.random.seed(seed) - - # 2. PyTorch - torch.manual_seed(seed) - - # 3. CUDA (GPU) related - if torch.cuda.is_available(): - torch.cuda.manual_seed(seed) # Set seed for the current GPU - torch.cuda.manual_seed_all(seed) # Set seed for *all* GPUs (important for multi-GPU setups) - - # Ensure deterministic algorithms are used for CUDA operations - torch.backends.cudnn.deterministic = True - torch.backends.cudnn.benchmark = False - - # (Optional but recommended for strict reproducibility) - # Force PyTorch to use deterministic algorithms (might impact performance) - # Note: This might require PyTorch 1.7+ and setting an environment variable - # before running the script (e.g., export CUBLAS_WORKSPACE_CONFIG=:4096:8) - # Alternatively, try setting it directly (requires PyTorch 1.8+): - torch.use_deterministic_algorithms(True, warn_only=True) - - # (Optional: Set environment variable directly in script - might not always work depending on when CUDA context is initialized) - # os.environ['CUBLAS_WORKSPACE_CONFIG'] = ':4096:8' - - print(f"Set seed to {seed} for random, numpy, and torch (including CUDA if available).") \ No newline at end of file diff --git a/experiments/utils/models.py b/experiments/utils/models.py new file mode 100644 index 0000000..2e5fb08 --- /dev/null +++ b/experiments/utils/models.py @@ -0,0 +1,198 @@ +import torch +import torchvision +import torch.nn as nn +from torchvision import transforms +from transformers import BertTokenizer, BertForSequenceClassification +from transformers import ViltForQuestionAnswering, ViltProcessor + +import xgboost as xgb +from typing import Union +from pathlib import Path + +from experiments.models import ResNet50LiverTumor, AKIClassifier, ResNetPlus, PatchTST, TabResNet + + +def get_torchvision_model(model_name): + weights = torchvision.models.get_model_weights(model_name).DEFAULT + model = torchvision.models.get_model(model_name, weights=weights).eval() + transform = weights.transforms() + return model, transform + + +def get_livertumor_model(model_path): + model = ResNet50LiverTumor(in_channels=1, num_classes=2) + checkpoint = torch.load(model_path) + + state_dict = { + k.replace("model.", "", 1).replace("module.", "", 1): v + for k, v in checkpoint.items() + } + model.load_state_dict(state_dict, strict=True) + model.eval() + + transform = transforms.Compose( + [ + transforms.ToTensor(), + transforms.Resize((224, 224), antialias=False), + ] + ) + + return model, transform + + +def 
get_livertumor_model_from_hf(repo_id="seongun/resnet50-livertumor"): + model = ResNet50LiverTumor.from_pretrained(repo_id) + model.eval() + + transform = transforms.Compose( + [ + transforms.ToTensor(), + transforms.Resize((224, 224), antialias=False), + ] + ) + + return model, transform + + +class Bert(BertForSequenceClassification): + def forward(self, input_ids, token_type_ids, attention_mask): + return ( + super() + .forward( + input_ids=input_ids, + token_type_ids=token_type_ids, + attention_mask=attention_mask, + ) + .logits + ) + + +def get_bert_model(model_name, num_labels): + return Bert.from_pretrained(model_name, num_labels=num_labels) + + +def bert_collate_fn(batch, tokenizer=None): + inputs = tokenizer( + [d[0] for d in batch], + padding=True, + truncation=True, + return_tensors="pt", + ) + labels = torch.tensor([d[1] for d in batch]) + return tuple(inputs.values()), labels + + +def get_bert_tokenizer(model_name): + return BertTokenizer.from_pretrained(model_name) + + +class Vilt(ViltForQuestionAnswering): + def forward( + self, + pixel_values, + input_ids, + token_type_ids, + attention_mask, + pixel_mask, + ): + return ( + super() + .forward( + input_ids=input_ids, + token_type_ids=token_type_ids, + attention_mask=attention_mask, + pixel_values=pixel_values, + pixel_mask=pixel_mask, + ) + .logits + ) + + +def get_vilt_model(model_name): + return Vilt.from_pretrained(model_name) + + +def get_vilt_processor(model_name): + return ViltProcessor.from_pretrained(model_name) + + +def vilt_collate_fn(batch, processor=None, label2id=None): + imgs = [d[0] for d in batch] + qsts = [d[1] for d in batch] + inputs = processor( + images=imgs, + text=qsts, + padding=True, + truncation=True, + return_tensors="pt", + ) + labels = torch.tensor([label2id[d[2]] for d in batch]) + return ( + inputs["pixel_values"], + inputs["input_ids"], + inputs["token_type_ids"], + inputs["attention_mask"], + inputs["pixel_mask"], + labels, + ) + + +def get_aki_model_from_hf(repo_id: str = "enver1323/aki-classifier") -> AKIClassifier: + return AKIClassifier.from_pretrained(repo_id) + + +def get_ecg_resnet_from_hf(repo_id: str = "enver1323/resnetplus-classification-ecg") -> ResNetPlus: + return ResNetPlus.from_pretrained(repo_id) + + +def get_ecg_patchtst_from_hf(repo_id: str = "enver1323/patchtst-classification-ecg") -> PatchTST: + return PatchTST.from_pretrained(repo_id) + + +class TorchModelForXGBoost(nn.Module): + """PyTorch wrapper for XGBoost models.""" + + def __init__(self, xgb_model: xgb.XGBClassifier): + super().__init__() + self.xgb_model = xgb_model + self._dummy_layer = nn.Linear(1, 1) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + if x.ndim >= 3: + x = x.squeeze(0) + + if x.requires_grad: + out = self.xgb_model.predict_proba(x.detach().cpu().numpy()) + else: + out = self.xgb_model.predict_proba(x.cpu().numpy()) + + return torch.from_numpy(out) + + +def get_winequality_model( + model_type: str, + model_path: Union[str, Path], + input_dim: int = None, + output_dim: int = 2, + **kwargs +) -> Union[xgb.XGBClassifier, nn.Module]: + """Load a trained model.""" + model_path = Path(model_path) + + if model_type == "xgb": + model = xgb.XGBClassifier() + model.load_model(str(model_path)) + return TorchModelForXGBoost(model) + + elif model_type == "tab_resnet": + if input_dim is None: + raise ValueError("input_dim is required for tab_resnet model") + + num_blocks = kwargs.get('num_blocks', 1) + model = TabResNet(input_dim, output_dim, num_blocks=num_blocks) + 
model.load_state_dict(torch.load(model_path)) + model.eval() + return model + + else: + raise ValueError(f"Unknown model type: {model_type}") \ No newline at end of file diff --git a/experiments/utils/utils.py b/experiments/utils/utils.py new file mode 100644 index 0000000..bbc94d2 --- /dev/null +++ b/experiments/utils/utils.py @@ -0,0 +1,312 @@ +from typing import Dict, Any, Tuple +import os +import csv +import pickle +import dill +import functools +import random +import inspect +import numpy as np + +import torch +from torch import Tensor +from torch.utils.data import Dataset, Subset, DataLoader +import zennit + +from pnpxai.core.modality.modality import Modality +from pnpxai.explainers import Explainer +from pnpxai.explainers.lrp import ( + LRPBase, + LRPEpsilonGammaBox, + LRPUniformEpsilon, + LRPEpsilonPlus, + LRPEpsilonAlpha2Beta1, + _get_uniform_epsilon_composite, + _get_epsilon_gamma_box_composite, + _get_epsilon_plus_composite, + _get_epsilon_alpha2_beta1_composite +) + + +def save_params_csv(params, filepath='params.csv'): + """Saves the best parameters to a CSV file after optimization. + + Args: + params (dict): A dictionary containing the parameter names and values. + filepath (str, optional): The path to the CSV file. Defaults to 'params.csv'. + """ + try: + with open(filepath, 'w', newline='') as csvfile: + writer = csv.writer(csvfile) + # Write header row + writer.writerow(['parameter', 'value']) + # Write parameter name and value rows + for key, value in params.items(): + writer.writerow([key, value]) + print(f"Best parameters saved to: {filepath}") + except Exception as e: + print(f"Error saving best parameters to CSV: {e}") + + +def save_pickle_data(data, filepath='data.pkl'): + """Saves the data to a pickle file. + + Args: + data: A data variable to pickle. + filepath (str, optional): The path to the pickle file. Defaults to 'data.pkl'. + """ + try: + with open(filepath, 'wb') as f: + dill.dump(data, f) + print(f"Data saved to: {filepath}") + except Exception as e: + print(f"Error saving data to pickle file: {e}") + + +def load_pickle_data(filepath='data.pkl'): + """Loads the data from a pickle file. + + Args: + filepath (str, optional): The path to the pickle file. Defaults to 'data.pkl'. + + Returns: + Any: The loaded data, or None if the file is not found or an error occurs.
+ """ + try: + if os.path.exists(filepath): + with open(filepath, 'rb') as f: + data = dill.load(f) + print(f"data loaded from: {filepath}") + return data + else: + print(f"Warning: Pickle file not found at {filepath}") + return None + except Exception as e: + print(f"Error loading data from pickle file: {e}") + return None + + +# utils +def img_to_np(img): return img.permute(1, 2, 0).detach().numpy() + + +def denormalize_image(inputs, mean, std): + return img_to_np( + inputs + * Tensor(std)[:, None, None] + + Tensor(mean)[:, None, None] + ) + + +def load_model_and_dataloader_for_tutorial(modality, device): + if modality == 'image': + model, transform = get_torchvision_model('resnet18') + model = model.to(device) + model.eval() + dataset = get_imagenet_dataset(transform) + loader = DataLoader(dataset, batch_size=8, shuffle=False) + return model, loader, transform + elif modality == 'text': + model = get_bert_model( + 'fabriceyhc/bert-base-uncased-imdb', num_labels=2) + model = model.to(device) + model.eval() + dataset = get_imdb_dataset(split='test') + tokenizer = get_bert_tokenizer('fabriceyhc/bert-base-uncased-imdb') + loader = DataLoader( + dataset, + batch_size=8, + shuffle=False, + collate_fn=functools.partial(bert_collate_fn, tokenizer=tokenizer) + ) + return model, loader, tokenizer + elif modality == ('image', 'text'): + model = get_vilt_model('dandelin/vilt-b32-finetuned-vqa') + model.to(device) + model.eval() + dataset = get_vqa_dataset() + processor = get_vilt_processor('dandelin/vilt-b32-finetuned-vqa') + loader = DataLoader( + dataset, + batch_size=2, + shuffle=False, + collate_fn=functools.partial( + vilt_collate_fn, + processor=processor, + label2id=model.config.label2id, + ), + ) + return model, loader, processor + + +def set_seed(seed): + """Sets the seed for various random number generators and CUDA settings for reproducibility.""" + seed = int(seed) + + # 1. Basic random libraries + random.seed(seed) + np.random.seed(seed) + + # 2. PyTorch + torch.manual_seed(seed) + + # 3. CUDA (GPU) related + if torch.cuda.is_available(): + torch.cuda.manual_seed(seed) # Set seed for the current GPU + torch.cuda.manual_seed_all(seed) # Set seed for *all* GPUs (important for multi-GPU setups) + + # Ensure deterministic algorithms are used for CUDA operations + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False + + # (Optional but recommended for strict reproducibility) + # Force PyTorch to use deterministic algorithms (might impact performance) + # Note: This might require PyTorch 1.7+ and setting an environment variable + # before running the script (e.g., export CUBLAS_WORKSPACE_CONFIG=:4096:8) + # Alternatively, try setting it directly (requires PyTorch 1.8+): + torch.use_deterministic_algorithms(True, warn_only=True) + + # (Optional: Set environment variable directly in script - might not always work depending on when CUDA context is initialized) + # os.environ['CUBLAS_WORKSPACE_CONFIG'] = ':4096:8' + + print(f"Set seed to {seed} for random, numpy, and torch (including CUDA if available).") + + +def set_params(params: Dict[str, Any], modality: Modality) -> Tuple[Dict[str, Any], Dict[str, Any]]: + """ + Processes a flat parameter dictionary and splits it for explainers and postprocessors. + + This function takes a flat dictionary of parameters and the experiment's + modality object. 
It uses the modality's internal 'FunctionSelectors' + (e.g., `modality.baseline_fn_selector`) to correctly instantiate callable + functions (like `baseline_fn` or `feature_mask_fn`) from their string names. + + It also handles nested parameters (e.g., 'baseline_fn.n_samples') by parsing + them and passing them as arguments to the selector. + + Finally, it splits all parameters into two dictionaries: + 1. `explainer_kwargs`: For the explainer (e.g., `n_steps`, `baseline_fn`). + 2. `post_kwargs`: For the postprocessor (e.g., `pooling_method`). + """ + # Define keys that belong to the postprocessor, not the explainer + POSTPROCESS_PARAM_KEYS = {'pooling_method', 'normalization_method'} + + # Separate explainer kwargs from postprocessor kwargs + explainer_kwargs = {k: v for k, v in params.items() if k not in POSTPROCESS_PARAM_KEYS} + post_kwargs = {k: v for k, v in params.items() if k in POSTPROCESS_PARAM_KEYS} + + # Separate nested parameters + all_nested_baseline_params = {} + all_nested_mask_params = {} + cleaned_explainer_kwargs = {} + + for k, v in list(explainer_kwargs.items()): + if k.startswith('baseline_fn.'): + all_nested_baseline_params[k.replace('baseline_fn.', '')] = v + elif k.startswith('feature_mask_fn.'): + all_nested_mask_params[k.replace('feature_mask_fn.', '')] = v + else: + cleaned_explainer_kwargs[k] = v + explainer_kwargs = cleaned_explainer_kwargs + + # Handle baseline_fn instantiation using the modality's selector + baseline_fn_str = explainer_kwargs.get('baseline_fn') + if baseline_fn_str and isinstance(baseline_fn_str, str): + try: + # Use the modality's selector to instantiate the baseline_fn + instance = modality.baseline_fn_selector.select( + baseline_fn_str, + **all_nested_baseline_params + ) + + # Wrap in a tuple for multi-modality compatibility + explainer_kwargs['baseline_fn'] = (instance,) + print(f" Instantiated baseline_fn: {baseline_fn_str} -> {type(instance)}") + except Exception as e: + print(f" Error instantiating baseline '{baseline_fn_str}': {e}. Removing key.") + del explainer_kwargs['baseline_fn'] + + # Handle feature_mask_fn instantiation using the modality's selector + feature_mask_fn_str = explainer_kwargs.get('feature_mask_fn') + if feature_mask_fn_str and isinstance(feature_mask_fn_str, str): + try: + # Use the modality's selector to instantiate the feature_mask_fn + instance = modality.feature_mask_fn_selector.select( + feature_mask_fn_str, + **all_nested_mask_params + ) + + # Wrap in a tuple + explainer_kwargs['feature_mask_fn'] = (instance,) + print(f" Instantiated feature_mask_fn: {feature_mask_fn_str} -> {type(instance)}") + except Exception as e: + print(f" Error instantiating feature mask '{feature_mask_fn_str}': {e}. Removing key.") + del explainer_kwargs['feature_mask_fn'] + + post_kwargs['channel_dim'] = modality.channel_dim + + return explainer_kwargs, post_kwargs + + +def patch_lrp_explainer(explainer: Explainer) -> Explainer: + """ + Utility function to synchronize pnpxai LRP explainers after `.set_kwargs()`. + + This function addresses an initialization behavior where pnpxai LRP wrappers + create their internal `zennit_composite` object during `__init__` using + default parameters. + + A subsequent `.set_kwargs()` updates the wrapper's attributes (e.g., `explainer.epsilon`), + but does not automatically propagate these changes to the internal `zennit_composite` + object, which was created during `__init__` with the default parameters. 
+ + This utility manually recreates the `zennit_composite` after `.set_kwargs()` + by calling the same private helper functions used in `lrp.py`'s `__init__`, + but this time feeding them the updated attributes from the explainer instance. + + Args: + explainer: The LRP explainer instance (e.g., LRPEpsilonGammaBox) + that has just been configured with `.set_kwargs()`. + + Returns: + The same explainer instance, now updated with a `zennit_composite` object + reflecting the new parameters. + """ + + new_composite = None + explainer_class_name = explainer.__class__.__name__ + + if isinstance(explainer, LRPEpsilonGammaBox): + new_composite = _get_epsilon_gamma_box_composite( + low=explainer.low, + high=explainer.high, + epsilon=explainer.epsilon, # Use the UPDATED attribute + gamma=explainer.gamma, # Use the UPDATED attribute + stabilizer=explainer.stabilizer, + zennit_canonizers=explainer.zennit_canonizers + ) + elif isinstance(explainer, LRPUniformEpsilon): + new_composite = _get_uniform_epsilon_composite( + epsilon=explainer.epsilon, # Use the UPDATED attribute + stabilizer=explainer.stabilizer, + zennit_canonizers=explainer.zennit_canonizers + ) + elif isinstance(explainer, LRPEpsilonPlus): + new_composite = _get_epsilon_plus_composite( + epsilon=explainer.epsilon, # Use the UPDATED attribute + stabilizer=explainer.stabilizer, + zennit_canonizers=explainer.zennit_canonizers + ) + elif isinstance(explainer, LRPEpsilonAlpha2Beta1): + new_composite = _get_epsilon_alpha2_beta1_composite( + epsilon=explainer.epsilon, # Use the UPDATED attribute + stabilizer=explainer.stabilizer, + zennit_canonizers=explainer.zennit_canonizers + ) + else: + return explainer + + if new_composite is not None: + explainer.zennit_composite = new_composite + + return explainer \ No newline at end of file diff --git a/setup.py b/setup.py index 45ddeb1..1446cf7 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ setup( name='pnpxai-experiments', version='0.1.0', - author='Seongun Kim', + author='Seongun Kim, Geonhyeong Kim, Enver Menadjiev, Chanwoo Lee', author_email='seongun@kaist.ac.kr', description='Experiments using the PnPXAI library.', long_description=long_description,
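A minimal usage sketch for the wine-quality helpers added in this patch (get_winequality_dataset, winequality_transform, winequality_invert_transform, winequality_find_idx). It is illustrative only: the import path experiments.utils.datasets is an assumption about where these helpers live, and the data/wine_quality layout simply follows the function defaults.

# Sketch only: module path below is assumed, not confirmed by this diff.
from experiments.utils.datasets import (
    get_winequality_dataset,
    winequality_transform,
    winequality_invert_transform,
    winequality_find_idx,
)

# Preprocessed splits, fitted per-feature encoders, and the raw CSV.
X_train, X_test, y_train, y_test, feature_metadata, raw_data = get_winequality_dataset(
    data_dir="data/wine_quality"
)

# Encode a few raw rows into the model input space, then map them back
# to the original feature space with the same encoders.
encoded = winequality_transform(raw_data.head(5).copy(), feature_metadata)
decoded = winequality_invert_transform(encoded, feature_metadata)

# winequality_find_idx returns the permutation idx with [a[i] for i in idx] == b,
# or None when a and b are not permutations of each other.
assert winequality_find_idx(['a', 'b', 'c'], ['c', 'a', 'b']) == [2, 0, 1]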
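A minimal sketch of get_winequality_model and the TorchModelForXGBoost wrapper from experiments/utils/models.py: the "xgb" branch loads a saved XGBClassifier and wraps it so explainers and metrics can call it like an nn.Module. The model file name models/wine_xgb.json and the feature count are placeholders.

# Sketch only: model path and feature count are placeholders.
import numpy as np
import torch
from experiments.utils.models import get_winequality_model

# Loads the saved XGBClassifier and wraps it in TorchModelForXGBoost.
model = get_winequality_model(model_type="xgb", model_path="models/wine_xgb.json")

n_features = 11  # placeholder: use the encoded input dimension from feature_metadata
x = torch.from_numpy(np.random.rand(4, n_features).astype(np.float32))

probs = model(x)             # (4, 2) tensor of class probabilities via predict_proba
preds = probs.argmax(dim=1)  # hard predictions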
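A minimal sketch for the ECG dataset helper added in this patch. The import path experiments.utils.datasets is assumed; the print is only there to inspect segment shape and dtype before feeding the ECG classifiers (ResNetPlus / PatchTST), since a .float() cast may be needed.

# Sketch only: module path is assumed.
from torch.utils.data import DataLoader
from experiments.utils.datasets import get_ecg_dataset_from_hf

# TwoLeadECG test split from the Hub, with labels encoded to integer class ids.
dataset = get_ecg_dataset_from_hf("enver1323/ucr-twoleadecg")
loader = DataLoader(dataset, batch_size=8, shuffle=False)

x, y = next(iter(loader))
print(x.shape, x.dtype, y.shape)  # check segment shape/dtype before running the models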
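A minimal sketch of the intended post-HPO flow for set_params and patch_lrp_explainer in experiments/utils/utils.py, pieced together from their docstrings. The parameter names and values, the pre-built modality and explainer objects, and the exact return behaviour of Explainer.set_kwargs are assumptions, not verified pnpxai API details.

# Sketch only: `modality` and `explainer` are assumed to be built elsewhere in the scripts.
from experiments.utils.utils import set_params, patch_lrp_explainer

modality = ...   # the experiment's Modality instance (e.g. the image modality used by the scripts)
explainer = ...  # an LRP explainer such as LRPUniformEpsilon, already attached to the model

best_params = {
    "epsilon": 0.25,                   # explainer hyperparameter (illustrative value)
    "pooling_method": "sumpos",        # postprocessor hyperparameter (illustrative value)
    "normalization_method": "minmax",  # postprocessor hyperparameter (illustrative value)
}

# Split the flat dict into explainer kwargs and postprocessor kwargs; string-valued
# baseline_fn / feature_mask_fn entries would be instantiated via the modality's selectors.
explainer_kwargs, post_kwargs = set_params(best_params, modality)
# post_kwargs now also carries modality.channel_dim for the postprocessor.

# set_kwargs may mutate in place or return a configured copy depending on the pnpxai
# version, so keep whichever object comes back.
explainer = explainer.set_kwargs(**explainer_kwargs) or explainer

# Rebuild the internal zennit composite so LRP actually uses the updated epsilon.
explainer = patch_lrp_explainer(explainer)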