submission_id: rica40325-lora32-third-step_v4
developer_uid: rica40325
alignment_samples: 10246
alignment_score: 0.38678552252285425
best_of: 16
celo_rating: 1260.48
display_name: rica40325-lora32-third-step_v2
formatter: {'memory_template': "{bot_name}'s Persona: {memory}\n####\n", 'prompt_template': '{prompt}\n<START>\n', 'bot_template': '{bot_name}: {message}\n', 'user_template': '{user_name}: {message}\n', 'response_template': '{bot_name}:', 'truncate_by_message': False}
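The formatter above is a set of plain string templates. Below is a minimal sketch of how they might be combined into a single prompt; the assembly order (persona block, scenario prompt, chat history, response prefix) and the build_prompt/user_name details are assumptions for illustration, while the template strings themselves come from this record.

    # Sketch: assembling a prompt from the formatter templates (assumed order).
    formatter = {
        "memory_template": "{bot_name}'s Persona: {memory}\n####\n",
        "prompt_template": "{prompt}\n<START>\n",
        "bot_template": "{bot_name}: {message}\n",
        "user_template": "{user_name}: {message}\n",
        "response_template": "{bot_name}:",
    }

    def build_prompt(bot_name, user_name, memory, prompt, turns):
        """turns: list of (speaker, message) pairs, speaker in {"user", "bot"}."""
        parts = [
            formatter["memory_template"].format(bot_name=bot_name, memory=memory),
            formatter["prompt_template"].format(prompt=prompt),
        ]
        for speaker, message in turns:
            template = formatter["bot_template"] if speaker == "bot" else formatter["user_template"]
            parts.append(template.format(bot_name=bot_name, user_name=user_name, message=message))
        parts.append(formatter["response_template"].format(bot_name=bot_name))
        return "".join(parts)

    print(build_prompt("Aria", "User", "A friendly companion.", "Casual chat.",
                       [("user", "Hi!"), ("bot", "Hello!"), ("user", "How are you?")]))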
generation_params: {'temperature': 0.95, 'top_p': 0.95, 'min_p': 0.05, 'top_k': 80, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'stopping_words': ['\n'], 'max_input_tokens': 512, 'best_of': 16, 'max_output_tokens': 64}
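The generation_params are standard sampling settings. The sketch below shows one possible mapping onto vLLM's SamplingParams; the record does not say which inference engine served the model, so vLLM (a mid-2024 release that still accepts best_of) is an assumption, while the parameter values are taken from the record.

    # Sketch: one possible way to reproduce these sampling settings with vLLM.
    # Assumes a 2024-era vLLM release in which SamplingParams still accepts best_of.
    from vllm import LLM, SamplingParams

    sampling = SamplingParams(
        temperature=0.95,
        top_p=0.95,
        min_p=0.05,
        top_k=80,
        presence_penalty=0.0,
        frequency_penalty=0.0,
        stop=["\n"],        # stopping_words
        max_tokens=64,      # max_output_tokens
        n=1,
        best_of=16,         # draw 16 candidates, keep the highest-scoring one
    )
    # max_input_tokens (512) would be enforced by truncating the prompt before generation.

    llm = LLM(model="rica40325/lora32_third_step")  # model_repo from this record
    out = llm.generate(["Aria:"], sampling)         # prompt string is illustrative only
    print(out[0].outputs[0].text)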
gpu_counts: {'NVIDIA RTX A5000': 1}
is_internal_developer: False
language_model: rica40325/lora32_third_step
latencies:
  batch_size  throughput (req/s)  latency_mean (s)  latency_p50 (s)  latency_p90 (s)
  1           0.916               1.091             1.079            1.230
  4           1.753               2.272             2.265            2.571
  5           1.852               2.682             2.664            3.039
  8           1.944               4.089             4.119            4.641
  10          1.999               4.948             4.909            5.693
  12          1.992               5.941             5.930            7.090
  15          2.001               7.341             7.391            8.175
max_input_tokens: 512
max_output_tokens: 64
model_architecture: LlamaForCausalLM
model_group: rica40325/lora32_third_s
model_name: rica40325-lora32-third-step_v2
model_num_parameters: 8030261248.0
model_repo: rica40325/lora32_third_step
model_size: 8B
num_battles: 10246
num_wins: 5471
propriety_score: 0.7589189189189189
propriety_total_count: 925.0
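The propriety_score is an exact fraction of the propriety_total_count: 702 / 925 ≈ 0.7589, i.e. roughly 702 of the 925 rated samples were judged acceptable (that reading of the numerator is an assumption).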
ranking_group: single
status: inactive
submission_type: basic
throughput_3p7s: 1.94
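throughput_3p7s summarizes the latency table above as the throughput achievable at roughly a 3.7 s mean latency. The exact interpolation the leaderboard uses is not documented here; the sketch below uses plain linear interpolation over the measured points, which lands near (but not exactly on) the reported 1.94.

    # Sketch: estimating throughput at a 3.7 s mean latency from the sweep above.
    # Plain linear interpolation is an assumption; it gives ~1.92 vs the reported 1.94.
    import numpy as np

    # (batch_size, throughput, latency_mean) rounded from the latencies table
    rows = [
        (1, 0.916, 1.091), (4, 1.753, 2.272), (5, 1.852, 2.682),
        (8, 1.944, 4.089), (10, 1.999, 4.948), (12, 1.992, 5.941),
        (15, 2.001, 7.341),
    ]
    latency = np.array([r[2] for r in rows])
    throughput = np.array([r[1] for r in rows])
    print(round(float(np.interp(3.7, latency, throughput)), 2))  # -> 1.92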
timestamp: 2024-08-31T10:32:04+00:00
us_pacific_date: 2024-08-31
win_ratio: 0.5339644739410502
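The win_ratio follows directly from the battle counts above: win_ratio = num_wins / num_battles = 5471 / 10246 ≈ 0.5340.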