submission_id: jic062-dpo-v1-8-c500_v1
developer_uid: chace9580
best_of: 8
celo_rating: 1255.86
display_name: jic062-dpo-v1-8-c500_v1
family_friendly_score: 0.5590638930163447
family_friendly_standard_error: 0.009542207523935187
formatter: {'memory_template': '[INST]system\n{memory}[/INST]\n', 'prompt_template': '[INST]user\n{prompt}[/INST]\n', 'bot_template': '[INST]assistant\n{bot_name}: {message}[/INST]\n', 'user_template': '[INST]user\n{user_name}: {message}[/INST]\n', 'response_template': '[INST]assistant\n{bot_name}:', 'truncate_by_message': False}
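The formatter fields above are plain Python format templates. A minimal sketch of how a conversation could be rendered with them (render_prompt and the concatenation order are illustrative assumptions, not the platform's actual serializer):

formatter = {
    'memory_template': '[INST]system\n{memory}[/INST]\n',
    'prompt_template': '[INST]user\n{prompt}[/INST]\n',
    'bot_template': '[INST]assistant\n{bot_name}: {message}[/INST]\n',
    'user_template': '[INST]user\n{user_name}: {message}[/INST]\n',
    'response_template': '[INST]assistant\n{bot_name}:',
}

def render_prompt(memory, prompt, turns, bot_name):
    # turns: list of (speaker_name, message, is_bot) tuples, oldest first
    parts = [formatter['memory_template'].format(memory=memory),
             formatter['prompt_template'].format(prompt=prompt)]
    for name, message, is_bot in turns:
        if is_bot:
            parts.append(formatter['bot_template'].format(bot_name=name, message=message))
        else:
            parts.append(formatter['user_template'].format(user_name=name, message=message))
    # generation continues from the open response template
    parts.append(formatter['response_template'].format(bot_name=bot_name))
    return ''.join(parts)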
generation_params: {'temperature': 1.0, 'top_p': 0.9, 'min_p': 0.05, 'top_k': 80, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'stopping_words': ['\n', '/s', '[/INST]'], 'max_input_tokens': 1024, 'best_of': 8, 'max_output_tokens': 64}
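best_of: 8 in the generation parameters means eight candidate completions are sampled per request and one is kept. A minimal sketch of that selection loop (generate_one and reward_score are hypothetical stand-ins for the real sampler and ranking model):

import random

def generate_one(prompt, params):
    # stand-in sampler; the real engine applies temperature/top_p/top_k/min_p
    # and stops at max_output_tokens or any of the stopping_words
    return prompt + f' <candidate {random.randint(0, 10**6)}>'

def reward_score(completion):
    # stand-in scoring function used to rank candidates
    return random.random()

def generate_best_of(prompt, params):
    candidates = [generate_one(prompt, params) for _ in range(params['best_of'])]
    return max(candidates, key=reward_score)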
gpu_counts: {'NVIDIA RTX A5000': 1}
ineligible_reason: num_battles<5000
is_internal_developer: False
language_model: jic062/dpo-v1.8-c500
latencies: [{'batch_size': 1, 'throughput': 0.6131394695273903, 'latency_mean': 1.6308904027938842, 'latency_p50': 1.6340969800949097, 'latency_p90': 1.7914819240570068}, {'batch_size': 3, 'throughput': 1.096421379887012, 'latency_mean': 2.726158376932144, 'latency_p50': 2.719970703125, 'latency_p90': 2.98418128490448}, {'batch_size': 5, 'throughput': 1.2499619342803197, 'latency_mean': 3.9799257314205168, 'latency_p50': 3.962437152862549, 'latency_p90': 4.472551536560059}, {'batch_size': 6, 'throughput': 1.267778565109742, 'latency_mean': 4.714811927080154, 'latency_p50': 4.710913777351379, 'latency_p90': 5.218051528930664}, {'batch_size': 8, 'throughput': 1.2534331088605513, 'latency_mean': 6.350632419586182, 'latency_p50': 6.357896685600281, 'latency_p90': 7.11754093170166}, {'batch_size': 10, 'throughput': 1.2185786473919782, 'latency_mean': 8.161975684165954, 'latency_p50': 8.154783368110657, 'latency_p90': 9.294791674613952}]
max_input_tokens: 1024
max_output_tokens: 64
model_architecture: MistralForCausalLM
model_group: jic062/dpo-v1.8-c500
model_name: jic062-dpo-v1-8-c500_v1
model_num_parameters: 12772070400.0
model_repo: jic062/dpo-v1.8-c500
model_size: 13B
num_battles: 2761
num_wins: 1394
ranking_group: single
status: torndown
submission_type: basic
throughput_3p7s: 1.23
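throughput_3p7s appears to be the throughput at roughly a 3.7 s mean request latency, read off the latencies table above. A sketch of that reading via linear interpolation (the exact definition used by the profiler is an assumption; the interpolated value lands near the reported 1.23):

# (batch_size, throughput, latency_mean) rounded from the latencies field above
latencies = [(1, 0.613, 1.631), (3, 1.096, 2.726), (5, 1.250, 3.980),
             (6, 1.268, 4.715), (8, 1.253, 6.351), (10, 1.219, 8.162)]

def throughput_at(target_latency, table):
    # linear interpolation of throughput against mean latency
    for (b0, t0, l0), (b1, t1, l1) in zip(table, table[1:]):
        if l0 <= target_latency <= l1:
            frac = (target_latency - l0) / (l1 - l0)
            return t0 + frac * (t1 - t0)
    raise ValueError('target latency outside measured range')

print(round(throughput_at(3.7, latencies), 2))  # ~1.22, close to throughput_3p7s = 1.23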
timestamp: 2024-09-25T05:40:48+00:00
us_pacific_date: 2024-09-24
win_ratio: 0.504889532777979
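win_ratio is simply num_wins / num_battles, and the figure above reproduces exactly:

num_battles, num_wins = 2761, 1394
win_ratio = num_wins / num_battles
print(win_ratio)            # 0.504889532777979, matching the win_ratio field
assert num_battles < 5000   # consistent with ineligible_reason: num_battles<5000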
Resubmit model
Shutdown handler not registered because Python interpreter is not running in the main thread
run pipeline %s
run pipeline stage %s
Running pipeline stage MKMLizer
Starting job with name jic062-dpo-v1-8-c500-v1-mkmlizer
Waiting for job on jic062-dpo-v1-8-c500-v1-mkmlizer to finish
jic062-dpo-v1-8-c500-v1-mkmlizer: ╔═════════════════════════════════════════════════════════════════════╗
jic062-dpo-v1-8-c500-v1-mkmlizer: ║ [flywheel ASCII banner] ║

jic062-dpo-v1-8-c500-v1-mkmlizer: ║ ║
jic062-dpo-v1-8-c500-v1-mkmlizer: ║ Version: 0.11.12 ║
jic062-dpo-v1-8-c500-v1-mkmlizer: ║ Copyright 2023 MK ONE TECHNOLOGIES Inc. ║
jic062-dpo-v1-8-c500-v1-mkmlizer: ║ https://mk1.ai ║
jic062-dpo-v1-8-c500-v1-mkmlizer: ║ ║
jic062-dpo-v1-8-c500-v1-mkmlizer: ║ The license key for the current software has been verified as ║
jic062-dpo-v1-8-c500-v1-mkmlizer: ║ belonging to: ║
jic062-dpo-v1-8-c500-v1-mkmlizer: ║ ║
jic062-dpo-v1-8-c500-v1-mkmlizer: ║ Chai Research Corp. ║
jic062-dpo-v1-8-c500-v1-mkmlizer: ║ Account ID: 7997a29f-0ceb-4cc7-9adf-840c57b4ae6f ║
jic062-dpo-v1-8-c500-v1-mkmlizer: ║ Expiration: 2024-10-15 23:59:59 ║
jic062-dpo-v1-8-c500-v1-mkmlizer: ║ ║
jic062-dpo-v1-8-c500-v1-mkmlizer: ╚═════════════════════════════════════════════════════════════════════╝
jic062-dpo-v1-8-c500-v1-mkmlizer: Downloaded to shared memory in 52.694s
jic062-dpo-v1-8-c500-v1-mkmlizer: quantizing model to /dev/shm/model_cache, profile:s0, folder:/tmp/tmpud3lqk6r, device:0
jic062-dpo-v1-8-c500-v1-mkmlizer: Saving flywheel model at /dev/shm/model_cache
Connection pool is full, discarding connection: %s. Connection pool size: %s
jic062-dpo-v1-8-c500-v1-mkmlizer: quantized model in 39.176s
jic062-dpo-v1-8-c500-v1-mkmlizer: Processed model jic062/dpo-v1.8-c500 in 91.870s
jic062-dpo-v1-8-c500-v1-mkmlizer: creating bucket guanaco-mkml-models
jic062-dpo-v1-8-c500-v1-mkmlizer: Bucket 's3://guanaco-mkml-models/' created
jic062-dpo-v1-8-c500-v1-mkmlizer: uploading /dev/shm/model_cache to s3://guanaco-mkml-models/jic062-dpo-v1-8-c500-v1
jic062-dpo-v1-8-c500-v1-mkmlizer: cp /dev/shm/model_cache/config.json s3://guanaco-mkml-models/jic062-dpo-v1-8-c500-v1/config.json
jic062-dpo-v1-8-c500-v1-mkmlizer: cp /dev/shm/model_cache/special_tokens_map.json s3://guanaco-mkml-models/jic062-dpo-v1-8-c500-v1/special_tokens_map.json
jic062-dpo-v1-8-c500-v1-mkmlizer: cp /dev/shm/model_cache/tokenizer_config.json s3://guanaco-mkml-models/jic062-dpo-v1-8-c500-v1/tokenizer_config.json
jic062-dpo-v1-8-c500-v1-mkmlizer: cp /dev/shm/model_cache/tokenizer.json s3://guanaco-mkml-models/jic062-dpo-v1-8-c500-v1/tokenizer.json
jic062-dpo-v1-8-c500-v1-mkmlizer: cp /dev/shm/model_cache/flywheel_model.0.safetensors s3://guanaco-mkml-models/jic062-dpo-v1-8-c500-v1/flywheel_model.0.safetensors
jic062-dpo-v1-8-c500-v1-mkmlizer: Loading 0: 98%|█████████▊| 357/363 [00:17<00:00, 25.63it/s]
Job jic062-dpo-v1-8-c500-v1-mkmlizer completed after 127.19s with status: succeeded
Stopping job with name jic062-dpo-v1-8-c500-v1-mkmlizer
Pipeline stage MKMLizer completed in 127.95s
run pipeline stage %s
Running pipeline stage MKMLTemplater
Pipeline stage MKMLTemplater completed in 1.36s
run pipeline stage %s
Running pipeline stage MKMLDeployer
Creating inference service jic062-dpo-v1-8-c500-v1
Waiting for inference service jic062-dpo-v1-8-c500-v1 to be ready
Connection pool is full, discarding connection: %s. Connection pool size: %s (repeated 13 times)
Inference service jic062-dpo-v1-8-c500-v1 ready after 212.8509533405304s
Pipeline stage MKMLDeployer completed in 215.14s
run pipeline stage %s
Running pipeline stage StressChecker
Received healthy response to inference request in 2.8413803577423096s
Received healthy response to inference request in 2.202681303024292s
Received healthy response to inference request in 2.8125030994415283s
Received healthy response to inference request in 4.703707456588745s
Received healthy response to inference request in 4.565014362335205s
5 requests
0 failed requests
5th percentile: 2.324645662307739
10th percentile: 2.4466100215911863
20th percentile: 2.690538740158081
30th percentile: 2.8182785511016846
40th percentile: 2.829829454421997
50th percentile: 2.8413803577423096
60th percentile: 3.5308339595794678
70th percentile: 4.220287561416626
80th percentile: 4.592752981185913
90th percentile: 4.6482302188873295
95th percentile: 4.675968837738037
99th percentile: 4.6981597328186036
mean time: 3.425057315826416
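The StressChecker percentiles above can be reproduced from the five per-request latencies using linear interpolation, e.g. NumPy's default method (whether the checker actually uses NumPy is an assumption; the numbers match to the digits shown):

import numpy as np

samples = [2.8413803577423096, 2.202681303024292, 2.8125030994415283,
           4.703707456588745, 4.565014362335205]
for p in (5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 95, 99):
    print(f'{p}th percentile: {np.percentile(samples, p)}')
print('mean time:', np.mean(samples))  # 3.425057315826416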
%s, retrying in %s seconds...
Received healthy response to inference request in 2.8905248641967773s
Received healthy response to inference request in 2.193399667739868s
Received healthy response to inference request in 3.235398292541504s
Received healthy response to inference request in 2.6207239627838135s
Received healthy response to inference request in 2.1914279460906982s
5 requests
0 failed requests
5th percentile: 2.1918222904205322
10th percentile: 2.192216634750366
20th percentile: 2.193005323410034
30th percentile: 2.278864526748657
40th percentile: 2.4497942447662355
50th percentile: 2.6207239627838135
60th percentile: 2.728644323348999
70th percentile: 2.8365646839141845
80th percentile: 2.959499549865723
90th percentile: 3.0974489212036134
95th percentile: 3.1664236068725584
99th percentile: 3.221603355407715
mean time: 2.6262949466705323
Pipeline stage StressChecker completed in 35.16s
run pipeline stage %s
Running pipeline stage TriggerMKMLProfilingPipeline
run_pipeline:run_in_cloud %s
starting trigger_guanaco_pipeline args=%s
Pipeline stage TriggerMKMLProfilingPipeline completed in 8.79s
Shutdown handler de-registered
jic062-dpo-v1-8-c500_v1 status is now deployed due to DeploymentManager action
Shutdown handler registered
run pipeline %s
run pipeline stage %s
Running pipeline stage MKMLProfilerDeleter
Skipping teardown as no inference service was successfully deployed
Pipeline stage MKMLProfilerDeleter completed in 0.15s
run pipeline stage %s
Running pipeline stage MKMLProfilerTemplater
Pipeline stage MKMLProfilerTemplater completed in 0.12s
run pipeline stage %s
Running pipeline stage MKMLProfilerDeployer
Creating inference service jic062-dpo-v1-8-c500-v1-profiler
Waiting for inference service jic062-dpo-v1-8-c500-v1-profiler to be ready
Inference service jic062-dpo-v1-8-c500-v1-profiler ready after 220.52576851844788s
Pipeline stage MKMLProfilerDeployer completed in 220.93s
run pipeline stage %s
Running pipeline stage MKMLProfilerRunner
kubectl cp /code/guanaco/guanaco_inference_services/src/inference_scripts tenant-chaiml-guanaco/jic062-dpo-v1-8-c500-v1-profiler-predictor-00001-deploymenv5hrm:/code/chaiverse_profiler_1727243507 --namespace tenant-chaiml-guanaco
kubectl exec -it jic062-dpo-v1-8-c500-v1-profiler-predictor-00001-deploymenv5hrm --namespace tenant-chaiml-guanaco -- sh -c 'cd /code/chaiverse_profiler_1727243507 && python profiles.py profile --best_of_n 8 --auto_batch 5 --batches 1,5,10,15,20,25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100,105,110,115,120,125,130,135,140,145,150,155,160,165,170,175,180,185,190,195 --samples 200 --input_tokens 1024 --output_tokens 64 --summary /code/chaiverse_profiler_1727243507/summary.json'
kubectl exec -it jic062-dpo-v1-8-c500-v1-profiler-predictor-00001-deploymenv5hrm --namespace tenant-chaiml-guanaco -- bash -c 'cat /code/chaiverse_profiler_1727243507/summary.json'
Pipeline stage MKMLProfilerRunner completed in 1157.39s
run pipeline stage %s
Running pipeline stage MKMLProfilerDeleter
Checking if service jic062-dpo-v1-8-c500-v1-profiler is running
Tearing down inference service jic062-dpo-v1-8-c500-v1-profiler
Service jic062-dpo-v1-8-c500-v1-profiler has been torndown
Pipeline stage MKMLProfilerDeleter completed in 2.09s
Shutdown handler de-registered
jic062-dpo-v1-8-c500_v1 status is now inactive due to auto deactivation (removal of underperforming models)
Shutdown handler de-registered
jic062-dpo-v1-8-c500_v1 status is now torndown due to DeploymentManager action