submission_id: cycy233-l3-bp-v6-c2_v1
developer_uid: shiroe40
alignment_samples: 13467
alignment_score: -0.5649177461062312
best_of: 16
celo_rating: 1248.42
display_name: auto
formatter: {'memory_template': "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n{bot_name}'s Persona: {memory}\n\n", 'prompt_template': '{prompt}<|eot_id|>', 'bot_template': '<|start_header_id|>assistant<|end_header_id|>\n\n{bot_name}: {message}<|eot_id|>', 'user_template': '<|start_header_id|>user<|end_header_id|>\n\n{user_name}: {message}<|eot_id|>', 'response_template': '<|start_header_id|>assistant<|end_header_id|>\n\n{bot_name}:', 'truncate_by_message': False}
generation_params: {'temperature': 1.0, 'top_p': 0.9, 'min_p': 0.05, 'top_k': 80, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'stopping_words': ['<|end_header_id|>', '<|eot_id|>'], 'max_input_tokens': 512, 'best_of': 16, 'max_output_tokens': 64}
gpu_counts: {'NVIDIA RTX A5000': 1}
is_internal_developer: false
language_model: cycy233/L3-bp-v6-c2
latencies: [{'batch_size': 1, 'throughput': 0.9103939049686501, 'latency_mean': 1.0983316969871522, 'latency_p50': 1.0882246494293213, 'latency_p90': 1.2301892518997193}, {'batch_size': 4, 'throughput': 1.7743028066454525, 'latency_mean': 2.2466555988788603, 'latency_p50': 2.2409284114837646, 'latency_p90': 2.520865035057068}, {'batch_size': 5, 'throughput': 1.8717791785893478, 'latency_mean': 2.658571048974991, 'latency_p50': 2.6563538312911987, 'latency_p90': 2.9991030216217043}, {'batch_size': 8, 'throughput': 1.9833187462316602, 'latency_mean': 4.0045657885074615, 'latency_p50': 3.989296317100525, 'latency_p90': 4.500141716003418}, {'batch_size': 10, 'throughput': 2.0231347356332474, 'latency_mean': 4.8937017333507535, 'latency_p50': 4.9443405866622925, 'latency_p90': 5.61640920639038}, {'batch_size': 12, 'throughput': 2.030253055435844, 'latency_mean': 5.831160287857056, 'latency_p50': 5.889148712158203, 'latency_p90': 6.704883861541748}, {'batch_size': 15, 'throughput': 2.0046468734116316, 'latency_mean': 7.3454375648498536, 'latency_p50': 7.482760310173035, 'latency_p90': 8.193301153182983}]
max_input_tokens: 512
max_output_tokens: 64
model_architecture: LlamaForCausalLM
model_group: cycy233/L3-bp-v6-c2
model_name: auto
model_num_parameters: 8030261248.0
model_repo: cycy233/L3-bp-v6-c2
model_size: 8B
num_battles: 13467
num_wins: 6853
propriety_score: 0.7181146025878004
propriety_total_count: 1082.0
ranking_group: single
status: inactive
submission_type: basic
throughput_3p7s: 1.98
timestamp: "2024-09-05T03:16:46+00:00"
us_pacific_date: "2024-09-04"
win_ratio: 0.5088735427340907
# Download Preference Data
# Resubmit model