submission_id: zonemercy-lexical-nemo-_1518_v25
developer_uid: zonemercy
best_of: 2
celo_rating: 1226.2
display_name: 0906stv1-2
family_friendly_score: 0.0
formatter: {'memory_template': "Write a slice of story that takes place over the course of a single day in Bot's life. Use stream-of-consciousness narration to explore the character's thoughts and perceptions. Include poetic, impressionistic descriptions of the character's surroundings and sensations. Weave in memories and reflections that provide insight into the Bot's past and inner life. The scene should feel like part of a lived-in world, with the scene naturally existing in a wider story.\nBot's Name: {bot_name}\nBot's Persona: {memory}\n####\n", 'prompt_template': '', 'bot_template': 'Bot: {message}\n', 'user_template': 'User: {message}\n', 'response_template': 'Bot[Start story]:', 'truncate_by_message': False}
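The formatter above is a set of plain format strings (a memory header, per-turn templates, and a response prefix). Below is a minimal sketch of how such templates could be assembled into a single prompt; the assemble_prompt helper and the sample persona/turns are hypothetical, and the memory_template text is abbreviated here:

```python
# Sketch only: assumes the templates are applied with str.format and simple
# concatenation; truncation to max_input_tokens is handled elsewhere and not shown.
formatter = {
    "memory_template": "Write a slice of story ...\nBot's Name: {bot_name}\nBot's Persona: {memory}\n####\n",  # abbreviated
    "prompt_template": "",
    "bot_template": "Bot: {message}\n",
    "user_template": "User: {message}\n",
    "response_template": "Bot[Start story]:",
}

def assemble_prompt(bot_name, memory, turns):
    """Concatenate the memory header, the chat turns, and the response prefix."""
    parts = [formatter["memory_template"].format(bot_name=bot_name, memory=memory)]
    for role, message in turns:
        template = formatter["bot_template"] if role == "bot" else formatter["user_template"]
        parts.append(template.format(message=message))
    parts.append(formatter["response_template"])
    return "".join(parts)

print(assemble_prompt("Mira", "A wandering archivist.", [("user", "Tell me about your morning.")]))
```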
generation_params: {'temperature': 1.0, 'top_p': 1.0, 'min_p': 0.05, 'top_k': 100, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'stopping_words': ['</s>', 'Bot:', 'User:', 'You:'], 'max_input_tokens': 1024, 'best_of': 2, 'max_output_tokens': 128}
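generation_params combines several standard sampling filters (temperature, top_p, top_k, min_p) plus stop words and token budgets. The following is a rough, illustrative sketch of how those filters are conventionally applied to a logit vector before sampling; it is not the serving stack's actual code, and best_of=2 (generate two candidates, keep one) is not shown:

```python
import numpy as np

def sample_token(logits, temperature=1.0, top_p=1.0, top_k=100, min_p=0.05, rng=None):
    """Apply temperature, top-k, min-p, and top-p filtering, then sample one token id."""
    rng = rng or np.random.default_rng()
    logits = np.asarray(logits, dtype=np.float64) / max(temperature, 1e-8)
    probs = np.exp(logits - logits.max())
    probs /= probs.sum()

    order = np.argsort(probs)[::-1]            # token ids, most likely first
    sorted_probs = probs[order]

    keep = np.zeros(len(sorted_probs), dtype=bool)
    keep[:top_k] = True                                      # top-k cutoff
    keep &= sorted_probs >= min_p * sorted_probs[0]          # min-p: floor relative to the top token
    keep &= np.cumsum(sorted_probs) - sorted_probs < top_p   # top-p nucleus

    filtered = np.where(keep, sorted_probs, 0.0)
    filtered /= filtered.sum()
    return int(order[rng.choice(len(filtered), p=filtered)])
```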
gpu_counts: {'NVIDIA RTX A5000': 1}
ineligible_reason: max_output_tokens!=64
is_internal_developer: True
language_model: zonemercy/Lexical-Nemo-v4-1k1e5
latencies: [{'batch_size': 1, 'throughput': 0.36108334366135625, 'latency_mean': 2.769383773803711, 'latency_p50': 2.760484457015991, 'latency_p90': 2.9319058656692505}, {'batch_size': 3, 'throughput': 0.8554400519964614, 'latency_mean': 3.503186262845993, 'latency_p50': 3.514250636100769, 'latency_p90': 3.6856832265853883}, {'batch_size': 5, 'throughput': 1.1830378589354258, 'latency_mean': 4.205161641836167, 'latency_p50': 4.222694993019104, 'latency_p90': 4.553855323791503}, {'batch_size': 6, 'throughput': 1.3029189579977776, 'latency_mean': 4.557749328613281, 'latency_p50': 4.5651609897613525, 'latency_p90': 4.919959926605225}, {'batch_size': 8, 'throughput': 1.4967585758215312, 'latency_mean': 5.3012175989151, 'latency_p50': 5.303738713264465, 'latency_p90': 5.720939874649048}, {'batch_size': 10, 'throughput': 1.6183693290230878, 'latency_mean': 6.137161530256272, 'latency_p50': 6.133976221084595, 'latency_p90': 6.682023406028748}]
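The throughput_3p7s field further down (0.96) reads like a throughput estimate at a 3.7 s latency budget derived from this table. A plain linear interpolation of throughput against mean latency is one plausible reading and lands close to the reported value, though the pipeline's exact derivation may differ:

```python
import numpy as np

# (mean latency in seconds, throughput) pairs copied from the latencies list above
latency = np.array([2.769, 3.503, 4.205, 4.558, 5.301, 6.137])
throughput = np.array([0.361, 0.855, 1.183, 1.303, 1.497, 1.618])

# Interpolate throughput at a 3.7 s mean-latency budget.
print(round(float(np.interp(3.7, latency, throughput)), 2))  # ~0.95, close to the reported 0.96
```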
max_input_tokens: 1024
max_output_tokens: 128
model_architecture: MistralForCausalLM
model_group: zonemercy/Lexical-Nemo-v
model_name: 0906stv1-2
model_num_parameters: 12772070400.0
model_repo: zonemercy/Lexical-Nemo-v4-1k1e5
model_size: 13B
num_battles: 10168
num_wins: 5133
ranking_group: single
status: torndown
submission_type: basic
throughput_3p7s: 0.96
timestamp: 2024-09-06T12:41:15+00:00
us_pacific_date: 2024-09-06
win_ratio: 0.5048190401258851
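The win_ratio is consistent with a direct wins-over-battles calculation:

```python
num_wins, num_battles = 5133, 10168
print(num_wins / num_battles)  # 0.5048190401258851, matching win_ratio
```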
Shutdown handler not registered because Python interpreter is not running in the main thread
run pipeline %s
run pipeline stage %s
Running pipeline stage MKMLizer
Starting job with name zonemercy-lexical-nemo-1518-v25-mkmlizer
Waiting for job on zonemercy-lexical-nemo-1518-v25-mkmlizer to finish
zonemercy-lexical-nemo-1518-v25-mkmlizer: ╔═════════════════════════════════════════════════════════════════════╗
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ _____ __ __ ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ / _/ /_ ___ __/ / ___ ___ / / ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ / _/ / // / |/|/ / _ \/ -_) -_) / ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ /_//_/\_, /|__,__/_//_/\__/\__/_/ ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ /___/ ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ Version: 0.10.1 ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ Copyright 2023 MK ONE TECHNOLOGIES Inc. ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ https://mk1.ai ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ The license key for the current software has been verified as ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ belonging to: ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ Chai Research Corp. ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ Account ID: 7997a29f-0ceb-4cc7-9adf-840c57b4ae6f ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ Expiration: 2024-10-15 23:59:59 ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ║ ║
zonemercy-lexical-nemo-1518-v25-mkmlizer: ╚═════════════════════════════════════════════════════════════════════╝
zonemercy-lexical-nemo-1518-v25-mkmlizer: Downloaded to shared memory in 55.275s
zonemercy-lexical-nemo-1518-v25-mkmlizer: quantizing model to /dev/shm/model_cache, profile:s0, folder:/tmp/tmpa08j2q6y, device:0
zonemercy-lexical-nemo-1518-v25-mkmlizer: Saving flywheel model at /dev/shm/model_cache
Connection pool is full, discarding connection: %s. Connection pool size: %s
Connection pool is full, discarding connection: %s. Connection pool size: %s
Connection pool is full, discarding connection: %s. Connection pool size: %s
Connection pool is full, discarding connection: %s. Connection pool size: %s
zonemercy-lexical-nemo-1518-v25-mkmlizer: quantized model in 40.830s
zonemercy-lexical-nemo-1518-v25-mkmlizer: Processed model zonemercy/Lexical-Nemo-v4-1k1e5 in 96.106s
zonemercy-lexical-nemo-1518-v25-mkmlizer: creating bucket guanaco-mkml-models
zonemercy-lexical-nemo-1518-v25-mkmlizer: Bucket 's3://guanaco-mkml-models/' created
zonemercy-lexical-nemo-1518-v25-mkmlizer: uploading /dev/shm/model_cache to s3://guanaco-mkml-models/zonemercy-lexical-nemo-1518-v25
zonemercy-lexical-nemo-1518-v25-mkmlizer: cp /dev/shm/model_cache/config.json s3://guanaco-mkml-models/zonemercy-lexical-nemo-1518-v25/config.json
zonemercy-lexical-nemo-1518-v25-mkmlizer: cp /dev/shm/model_cache/special_tokens_map.json s3://guanaco-mkml-models/zonemercy-lexical-nemo-1518-v25/special_tokens_map.json
zonemercy-lexical-nemo-1518-v25-mkmlizer: cp /dev/shm/model_cache/tokenizer_config.json s3://guanaco-mkml-models/zonemercy-lexical-nemo-1518-v25/tokenizer_config.json
zonemercy-lexical-nemo-1518-v25-mkmlizer: cp /dev/shm/model_cache/tokenizer.json s3://guanaco-mkml-models/zonemercy-lexical-nemo-1518-v25/tokenizer.json
zonemercy-lexical-nemo-1518-v25-mkmlizer: cp /dev/shm/model_cache/flywheel_model.0.safetensors s3://guanaco-mkml-models/zonemercy-lexical-nemo-1518-v25/flywheel_model.0.safetensors
zonemercy-lexical-nemo-1518-v25-mkmlizer: Loading 0: 0%| | 0/363 [00:00<?, ?it/s] ... Loading 0: 98%|█████████▊| 357/363 [00:19<00:01, 5.08it/s] (per-shard loading progress output trimmed)
Job zonemercy-lexical-nemo-1518-v25-mkmlizer completed after 126.3s with status: succeeded
Stopping job with name zonemercy-lexical-nemo-1518-v25-mkmlizer
Pipeline stage MKMLizer completed in 127.91s
run pipeline stage %s
Running pipeline stage MKMLTemplater
Pipeline stage MKMLTemplater completed in 0.09s
run pipeline stage %s
Running pipeline stage MKMLDeployer
Creating inference service zonemercy-lexical-nemo-1518-v25
Waiting for inference service zonemercy-lexical-nemo-1518-v25 to be ready
Failed to get response for submission zonemercy-lexical-nemo-_1518_v23: ('http://zonemercy-lexical-nemo-1518-v23-predictor.tenant-chaiml-guanaco.k.chaiverse.com/v1/models/GPT-J-6B-lit-v2:predict', '{"error":"ValueError : [TypeError(\\"\'numpy.int64\' object is not iterable\\"), TypeError(\'vars() argument must have __dict__ attribute\')]"}')
Failed to get response for submission zonemercy-base-story-v1_v2: ('http://zonemercy-base-story-v1-v2-predictor.tenant-chaiml-guanaco.k.chaiverse.com/v1/models/GPT-J-6B-lit-v2:predict', '{"error":"ValueError : [TypeError(\\"\'numpy.int64\' object is not iterable\\"), TypeError(\'vars() argument must have __dict__ attribute\')]"}')
Failed to get response for submission zonemercy-base-story-v1_v3: ('http://zonemercy-base-story-v1-v3-predictor.tenant-chaiml-guanaco.k.chaiverse.com/v1/models/GPT-J-6B-lit-v2:predict', 'upstream connect error or disconnect/reset before headers. reset reason: connection timeout')
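The failed requests above (for other zonemercy submissions) report TypeError: 'numpy.int64' object is not iterable. For illustration only, that message is what Python produces when code tries to iterate over or unpack a scalar NumPy integer; this minimal reproduction is unrelated to the actual predictor code:

```python
import numpy as np

value = np.int64(3)
try:
    list(value)  # a 0-d NumPy integer is not iterable
except TypeError as exc:
    print(exc)   # 'numpy.int64' object is not iterable
```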
Inference service zonemercy-lexical-nemo-1518-v25 ready after 151.35462999343872s
Pipeline stage MKMLDeployer completed in 151.86s
run pipeline stage %s
Running pipeline stage StressChecker
Received healthy response to inference request in 3.7171456813812256s
Received healthy response to inference request in 3.166182518005371s
Received healthy response to inference request in 2.770496129989624s
Received healthy response to inference request in 2.922053098678589s
Received healthy response to inference request in 3.6367290019989014s
5 requests
0 failed requests
5th percentile: 2.800807523727417
10th percentile: 2.83111891746521
20th percentile: 2.891741704940796
30th percentile: 2.9708789825439452
40th percentile: 3.0685307502746584
50th percentile: 3.166182518005371
60th percentile: 3.354401111602783
70th percentile: 3.5426197052001953
80th percentile: 3.6528123378753663
90th percentile: 3.6849790096282957
95th percentile: 3.7010623455047607
99th percentile: 3.7139290142059327
mean time: 3.242521286010742
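The summary statistics above appear consistent with NumPy's default linear-interpolation percentiles over the five response times; a small sketch that reproduces them (the aggregation code itself is not shown in this log, so the method is an assumption):

```python
import numpy as np

times = [3.7171456813812256, 3.166182518005371, 2.770496129989624,
         2.922053098678589, 3.6367290019989014]

for q in (5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 95, 99):
    print(f"{q}th percentile: {np.percentile(times, q)}")
print("mean time:", np.mean(times))
```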
Pipeline stage StressChecker completed in 20.14s
run pipeline stage %s
Running pipeline stage TriggerMKMLProfilingPipeline
run_pipeline:run_in_cloud %s
starting trigger_guanaco_pipeline args=%s
Pipeline stage TriggerMKMLProfilingPipeline completed in 7.42s
Shutdown handler de-registered
zonemercy-lexical-nemo-_1518_v25 status is now deployed due to DeploymentManager action
Shutdown handler registered
run pipeline %s
run pipeline stage %s
Running pipeline stage MKMLProfilerDeleter
Skipping teardown as no inference service was successfully deployed
Pipeline stage MKMLProfilerDeleter completed in 0.11s
run pipeline stage %s
Running pipeline stage MKMLProfilerTemplater
Pipeline stage MKMLProfilerTemplater completed in 0.11s
run pipeline stage %s
Running pipeline stage MKMLProfilerDeployer
Creating inference service zonemercy-lexical-nemo-1518-v25-profiler
Waiting for inference service zonemercy-lexical-nemo-1518-v25-profiler to be ready
Inference service zonemercy-lexical-nemo-1518-v25-profiler ready after 150.53605103492737s
Pipeline stage MKMLProfilerDeployer completed in 150.89s
run pipeline stage %s
Running pipeline stage MKMLProfilerRunner
kubectl cp /code/guanaco/guanaco_inference_services/src/inference_scripts tenant-chaiml-guanaco/zonemercy-lexical-ne265080259c31079413e7b8b1bfb6249d-deplomn446:/code/chaiverse_profiler_1725626969 --namespace tenant-chaiml-guanaco
kubectl exec -it zonemercy-lexical-ne265080259c31079413e7b8b1bfb6249d-deplomn446 --namespace tenant-chaiml-guanaco -- sh -c 'cd /code/chaiverse_profiler_1725626969 && python profiles.py profile --best_of_n 2 --auto_batch 5 --batches 1,5,10,15,20,25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100,105,110,115,120,125,130,135,140,145,150,155,160,165,170,175,180,185,190,195 --samples 200 --input_tokens 1024 --output_tokens 128 --summary /code/chaiverse_profiler_1725626969/summary.json'
kubectl exec -it zonemercy-lexical-ne265080259c31079413e7b8b1bfb6249d-deplomn446 --namespace tenant-chaiml-guanaco -- bash -c 'cat /code/chaiverse_profiler_1725626969/summary.json'
Pipeline stage MKMLProfilerRunner completed in 1374.68s
run pipeline stage %s
Running pipeline stage MKMLProfilerDeleter
Checking if service zonemercy-lexical-nemo-1518-v25-profiler is running
Tearing down inference service zonemercy-lexical-nemo-1518-v25-profiler
Service zonemercy-lexical-nemo-1518-v25-profiler has been torndown
Pipeline stage MKMLProfilerDeleter completed in 1.62s
Shutdown handler de-registered
zonemercy-lexical-nemo-_1518_v25 status is now inactive due to auto deactivation of underperforming models
zonemercy-lexical-nemo-_1518_v27 status is now torndown due to DeploymentManager action