submission_id: stark2000s-utarcaht-v1-2_v1
developer_uid: stark2000s
alignment_samples: 14560
alignment_score: -0.13203775865303996
best_of: 1
celo_rating: 1095.21
display_name: stark2000s-utarcaht-v1-2_v1
formatter: {'memory_template': "{bot_name}'s Persona: {memory}\n####\n", 'prompt_template': '{prompt}\n<START>\n', 'bot_template': '{bot_name}: {message}\n', 'user_template': '{user_name}: {message}\n', 'response_template': '{bot_name}:', 'truncate_by_message': False}
generation_params: {'temperature': 1.0, 'top_p': 1.0, 'min_p': 0.0, 'top_k': 40, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'stopping_words': ['\n'], 'max_input_tokens': 512, 'best_of': 1, 'max_output_tokens': 64}
gpu_counts: {'NVIDIA RTX A5000': 1}
is_internal_developer: False
language_model: stark2000s/utarcaht-v1.2
latencies: [{'batch_size': 1, 'throughput': 1.0688675594477886, 'latency_mean': 0.9354757058620453, 'latency_p50': 0.9362729787826538, 'latency_p90': 1.0499861717224122}, {'batch_size': 5, 'throughput': 3.502017757875743, 'latency_mean': 1.4197851145267486, 'latency_p50': 1.422858476638794, 'latency_p90': 1.5941072940826415}, {'batch_size': 10, 'throughput': 5.15679012835192, 'latency_mean': 1.9161175179481507, 'latency_p50': 1.91525399684906, 'latency_p90': 2.153678822517395}, {'batch_size': 15, 'throughput': 6.061593217363593, 'latency_mean': 2.4450462746620176, 'latency_p50': 2.43346107006073, 'latency_p90': 2.812287712097168}, {'batch_size': 20, 'throughput': 6.53732354548411, 'latency_mean': 3.007975550889969, 'latency_p50': 2.9740006923675537, 'latency_p90': 3.477402281761169}, {'batch_size': 25, 'throughput': 6.96517762410716, 'latency_mean': 3.5196910667419434, 'latency_p50': 3.4997873306274414, 'latency_p90': 4.035225844383239}, {'batch_size': 30, 'throughput': 7.231691721222628, 'latency_mean': 4.05731275677681, 'latency_p50': 4.013387560844421, 'latency_p90': 4.87428286075592}, {'batch_size': 35, 'throughput': 7.382205116757551, 'latency_mean': 4.622128660678864, 'latency_p50': 4.585745215415955, 'latency_p90': 5.513689422607421}, {'batch_size': 40, 'throughput': 7.318238487037162, 'latency_mean': 5.282840359210968, 'latency_p50': 5.226061701774597, 'latency_p90': 6.469784951210022}]
max_input_tokens: 512
max_output_tokens: 64
model_architecture: LlamaForCausalLM
model_group: stark2000s/utarcaht-v1.2
model_name: stark2000s-utarcaht-v1-2_v1
model_num_parameters: 8030261248.0
model_repo: stark2000s/utarcaht-v1.2
model_size: 8B
num_battles: 14560
num_wins: 4452
propriety_score: 0.7440878378378378
propriety_total_count: 1184.0
ranking_group: single
status: inactive
submission_type: basic
throughput_3p7s: 7.21
timestamp: 2024-09-05T09:45:39+00:00
us_pacific_date: 2024-09-05
win_ratio: 0.3057692307692308
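The `formatter` and `generation_params` fields above are the only parts of the submission that control prompting and sampling. As a rough illustration of how they fit together, the sketch below assembles a prompt from the five templates and collects the sampling settings in one place; the function name, example arguments, and conversation structure are assumptions for illustration, not part of the submission.

```python
# Illustrative reconstruction of how the submission's formatter templates
# compose a prompt. Helper names and example data are hypothetical.

formatter = {
    "memory_template": "{bot_name}'s Persona: {memory}\n####\n",
    "prompt_template": "{prompt}\n<START>\n",
    "bot_template": "{bot_name}: {message}\n",
    "user_template": "{user_name}: {message}\n",
    "response_template": "{bot_name}:",
}

def build_prompt(bot_name, memory, prompt, turns, user_name="User"):
    """Persona block, then scenario, then chat history, then the response
    stub ("{bot_name}:") that the model is asked to continue."""
    text = formatter["memory_template"].format(bot_name=bot_name, memory=memory)
    text += formatter["prompt_template"].format(prompt=prompt)
    for speaker, message in turns:
        if speaker == "bot":
            text += formatter["bot_template"].format(bot_name=bot_name, message=message)
        else:
            text += formatter["user_template"].format(user_name=user_name, message=message)
    # The serving stack additionally truncates the result to
    # max_input_tokens=512 (truncate_by_message is False); not modeled here.
    return text + formatter["response_template"].format(bot_name=bot_name)

# Sampling settings copied verbatim from `generation_params`: temperature 1.0
# with top_p/min_p effectively disabled, candidates limited to the 40 most
# likely tokens, at most 64 output tokens, and decoding stopped at the first
# newline, i.e. one chat turn per call.
generation_params = {
    "temperature": 1.0, "top_p": 1.0, "min_p": 0.0, "top_k": 40,
    "presence_penalty": 0.0, "frequency_penalty": 0.0,
    "stopping_words": ["\n"], "max_input_tokens": 512,
    "best_of": 1, "max_output_tokens": 64,
}
```

Because the bot template ends every turn with a newline and `stopping_words` is `['\n']`, a single generation produces exactly one reply line.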
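The remaining numeric fields are either raw counts or values derived from them. The short sketch below (plain Python, values copied from this page and rounded for readability) checks that `win_ratio` is `num_wins / num_battles` and locates the peak of the latency/throughput profile; how `throughput_3p7s` is computed is not shown on this page and presumably comes from the fuller profiling sweep later in the log.

```python
# Derived metrics, using values copied from the fields above.

num_wins, num_battles = 4452, 14560
print(f"win_ratio = {num_wins / num_battles:.6f}")   # 0.305769, as reported

# (batch_size, throughput req/s, mean latency s), rounded from `latencies`.
latencies = [
    (1, 1.07, 0.94), (5, 3.50, 1.42), (10, 5.16, 1.92),
    (15, 6.06, 2.45), (20, 6.54, 3.01), (25, 6.97, 3.52),
    (30, 7.23, 4.06), (35, 7.38, 4.62), (40, 7.32, 5.28),
]

# Throughput saturates around batch size 35 while mean latency keeps rising,
# the usual throughput/latency trade-off for batched decoding.
best = max(latencies, key=lambda row: row[1])
print(f"peak throughput {best[1]:.2f} req/s at batch size {best[0]}")
```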
Shutdown handler not registered because Python interpreter is not running in the main thread
run pipeline %s
run pipeline stage %s
Running pipeline stage MKMLizer
Starting job with name stark2000s-utarcaht-v1-2-v1-mkmlizer
Waiting for job on stark2000s-utarcaht-v1-2-v1-mkmlizer to finish
stark2000s-utarcaht-v1-2-v1-mkmlizer: ╔═════════════════════════════════════════════════════════════════════╗
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║                  flywheel (ASCII-art wordmark)                      ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║ ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║ Version: 0.10.1 ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║ Copyright 2023 MK ONE TECHNOLOGIES Inc. ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║ https://mk1.ai ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║ ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║ The license key for the current software has been verified as ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║ belonging to: ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║ ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║ Chai Research Corp. ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║ Account ID: 7997a29f-0ceb-4cc7-9adf-840c57b4ae6f ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║ Expiration: 2024-10-15 23:59:59 ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ║ ║
stark2000s-utarcaht-v1-2-v1-mkmlizer: ╚═════════════════════════════════════════════════════════════════════╝
stark2000s-utarcaht-v1-2-v1-mkmlizer: Downloaded to shared memory in 32.790s
stark2000s-utarcaht-v1-2-v1-mkmlizer: quantizing model to /dev/shm/model_cache, profile:s0, folder:/tmp/tmpk88msp2a, device:0
stark2000s-utarcaht-v1-2-v1-mkmlizer: Saving flywheel model at /dev/shm/model_cache
stark2000s-utarcaht-v1-2-v1-mkmlizer: quantized model in 25.511s
stark2000s-utarcaht-v1-2-v1-mkmlizer: Processed model stark2000s/utarcaht-v1.2 in 58.301s
stark2000s-utarcaht-v1-2-v1-mkmlizer: creating bucket guanaco-mkml-models
stark2000s-utarcaht-v1-2-v1-mkmlizer: Bucket 's3://guanaco-mkml-models/' created
stark2000s-utarcaht-v1-2-v1-mkmlizer: uploading /dev/shm/model_cache to s3://guanaco-mkml-models/stark2000s-utarcaht-v1-2-v1
stark2000s-utarcaht-v1-2-v1-mkmlizer: cp /dev/shm/model_cache/config.json s3://guanaco-mkml-models/stark2000s-utarcaht-v1-2-v1/config.json
stark2000s-utarcaht-v1-2-v1-mkmlizer: cp /dev/shm/model_cache/special_tokens_map.json s3://guanaco-mkml-models/stark2000s-utarcaht-v1-2-v1/special_tokens_map.json
stark2000s-utarcaht-v1-2-v1-mkmlizer: cp /dev/shm/model_cache/tokenizer_config.json s3://guanaco-mkml-models/stark2000s-utarcaht-v1-2-v1/tokenizer_config.json
stark2000s-utarcaht-v1-2-v1-mkmlizer: cp /dev/shm/model_cache/tokenizer.json s3://guanaco-mkml-models/stark2000s-utarcaht-v1-2-v1/tokenizer.json
stark2000s-utarcaht-v1-2-v1-mkmlizer: cp /dev/shm/model_cache/flywheel_model.0.safetensors s3://guanaco-mkml-models/stark2000s-utarcaht-v1-2-v1/flywheel_model.0.safetensors
stark2000s-utarcaht-v1-2-v1-mkmlizer: Loading 0:   0%|          | 0/291 [00:00<?, ?it/s] ... 98%|█████████▊| 286/291 [00:05<00:00, 80.17it/s]
Job stark2000s-utarcaht-v1-2-v1-mkmlizer completed after 84.09s with status: succeeded
Stopping job with name stark2000s-utarcaht-v1-2-v1-mkmlizer
Pipeline stage MKMLizer completed in 85.04s
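The `cp ... s3://guanaco-mkml-models/...` lines above are the MKMLizer's progress messages for publishing the quantized artifacts; the tool's own upload code is not shown in this log. Purely for illustration, an equivalent upload of the same five files with boto3 (an assumption, not the actual mechanism) could look like this:

```python
import boto3

# Destination bucket/prefix and artifact names copied from the log above;
# using boto3 here is an illustrative assumption, not the MKMLizer's code.
bucket = "guanaco-mkml-models"
prefix = "stark2000s-utarcaht-v1-2-v1"
artifacts = [
    "config.json",
    "special_tokens_map.json",
    "tokenizer_config.json",
    "tokenizer.json",
    "flywheel_model.0.safetensors",
]

s3 = boto3.client("s3")
for name in artifacts:
    s3.upload_file(f"/dev/shm/model_cache/{name}", bucket, f"{prefix}/{name}")
```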
run pipeline stage %s
Running pipeline stage MKMLTemplater
Pipeline stage MKMLTemplater completed in 0.11s
run pipeline stage %s
Running pipeline stage MKMLDeployer
Creating inference service stark2000s-utarcaht-v1-2-v1
Waiting for inference service stark2000s-utarcaht-v1-2-v1 to be ready
Connection pool is full, discarding connection: %s. Connection pool size: %s
Failed to get response for submission blend_susol_2024-08-22: ('http://mistralai-mixtral-8x7b-3473-v130-predictor.tenant-chaiml-guanaco.k.chaiverse.com/v1/models/GPT-J-6B-lit-v2:predict', 'read tcp 127.0.0.1:57258->127.0.0.1:8080: read: connection reset by peer\n')
Inference service stark2000s-utarcaht-v1-2-v1 ready after 140.49311542510986s
Pipeline stage MKMLDeployer completed in 140.96s
run pipeline stage %s
Running pipeline stage StressChecker
Received healthy response to inference request in 1.7897138595581055s
Received healthy response to inference request in 0.40039968490600586s
Received healthy response to inference request in 0.8121316432952881s
Received healthy response to inference request in 1.5040464401245117s
Received healthy response to inference request in 0.5852932929992676s
5 requests
0 failed requests
5th percentile: 0.4373784065246582
10th percentile: 0.47435712814331055
20th percentile: 0.5483145713806152
30th percentile: 0.6306609630584716
40th percentile: 0.7213963031768799
50th percentile: 0.8121316432952881
60th percentile: 1.0888975620269774
70th percentile: 1.365663480758667
80th percentile: 1.5611799240112305
90th percentile: 1.6754468917846679
95th percentile: 1.7325803756713867
99th percentile: 1.7782871627807617
mean time: 1.0183169841766357
Pipeline stage StressChecker completed in 6.07s
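The StressChecker percentiles above are consistent with plain linear-interpolation percentiles over the five response times. A quick check with NumPy, whose default percentile method is linear interpolation (assuming that is also what the checker uses):

```python
import numpy as np

# The five healthy response times reported above, in seconds.
times = [1.7897138595581055, 0.40039968490600586, 0.8121316432952881,
         1.5040464401245117, 0.5852932929992676]

for p in (5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 95, 99):
    print(f"{p}th percentile: {np.percentile(times, p)}")
print("mean time:", np.mean(times))   # 1.0183169841766357, as reported
```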
run pipeline stage %s
Running pipeline stage TriggerMKMLProfilingPipeline
run_pipeline:run_in_cloud %s
starting trigger_guanaco_pipeline args=%s
Pipeline stage TriggerMKMLProfilingPipeline completed in 4.54s
Shutdown handler de-registered
stark2000s-utarcaht-v1-2_v1 status is now deployed due to DeploymentManager action
Shutdown handler registered
run pipeline %s
run pipeline stage %s
Running pipeline stage MKMLProfilerDeleter
Skipping teardown as no inference service was successfully deployed
Pipeline stage MKMLProfilerDeleter completed in 0.12s
run pipeline stage %s
Running pipeline stage MKMLProfilerTemplater
Pipeline stage MKMLProfilerTemplater completed in 0.12s
run pipeline stage %s
Running pipeline stage MKMLProfilerDeployer
Creating inference service stark2000s-utarcaht-v1-2-v1-profiler
Waiting for inference service stark2000s-utarcaht-v1-2-v1-profiler to be ready
Inference service stark2000s-utarcaht-v1-2-v1-profiler ready after 150.35386967658997s
Pipeline stage MKMLProfilerDeployer completed in 150.72s
run pipeline stage %s
Running pipeline stage MKMLProfilerRunner
kubectl cp /code/guanaco/guanaco_inference_services/src/inference_scripts tenant-chaiml-guanaco/stark2000s-utarcaht-v1-2-v1-profiler-predictor-00001-deplogjjqw:/code/chaiverse_profiler_1725529968 --namespace tenant-chaiml-guanaco
kubectl exec -it stark2000s-utarcaht-v1-2-v1-profiler-predictor-00001-deplogjjqw --namespace tenant-chaiml-guanaco -- sh -c 'cd /code/chaiverse_profiler_1725529968 && python profiles.py profile --best_of_n 1 --auto_batch 5 --batches 1,5,10,15,20,25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100,105,110,115,120,125,130,135,140,145,150,155,160,165,170,175,180,185,190,195 --samples 200 --input_tokens 512 --output_tokens 64 --summary /code/chaiverse_profiler_1725529968/summary.json'
kubectl exec -it stark2000s-utarcaht-v1-2-v1-profiler-predictor-00001-deplogjjqw --namespace tenant-chaiml-guanaco -- bash -c 'cat /code/chaiverse_profiler_1725529968/summary.json'
Pipeline stage MKMLProfilerRunner completed in 463.29s
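The two `kubectl` invocations above copy the profiling scripts into the running predictor pod and then execute a sweep of batch sizes inside it. A hedged sketch of how such a command line could be assembled programmatically is below; the pod name, namespace, paths, and profiler flags are taken verbatim from the log, while the Python wrapper itself is illustrative.

```python
import subprocess

pod = "stark2000s-utarcaht-v1-2-v1-profiler-predictor-00001-deplogjjqw"
namespace = "tenant-chaiml-guanaco"
workdir = "/code/chaiverse_profiler_1725529968"

# Batch sizes swept by the profiler: 1, then 5..195 in steps of 5.
batches = [1] + list(range(5, 200, 5))

profile_cmd = (
    f"cd {workdir} && python profiles.py profile "
    f"--best_of_n 1 --auto_batch 5 "
    f"--batches {','.join(map(str, batches))} "
    f"--samples 200 --input_tokens 512 --output_tokens 64 "
    f"--summary {workdir}/summary.json"
)

# Run the profiler inside the pod, then read back the summary it wrote.
subprocess.run(["kubectl", "exec", pod, "--namespace", namespace,
                "--", "sh", "-c", profile_cmd], check=True)
summary = subprocess.run(["kubectl", "exec", pod, "--namespace", namespace,
                          "--", "cat", f"{workdir}/summary.json"],
                         check=True, capture_output=True, text=True).stdout
print(summary)
```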
run pipeline stage %s
Running pipeline stage MKMLProfilerDeleter
Checking if service stark2000s-utarcaht-v1-2-v1-profiler is running
Tearing down inference service stark2000s-utarcaht-v1-2-v1-profiler
Service stark2000s-utarcaht-v1-2-v1-profiler has been torn down
Pipeline stage MKMLProfilerDeleter completed in 1.55s
Shutdown handler de-registered
stark2000s-utarcaht-v1-2_v1 status is now inactive due to auto deactivation (removal of underperforming models)
