submission_id: nitral-ai-hathor-tahsin-_9764_v7
developer_uid: Nitral-AI
alignment_samples: 11446
alignment_score: -0.016822014866178955
best_of: 16
celo_rating: 1223.78
display_name: Hathor_Tahsin-L3-8B-v0-9
formatter: {'memory_template': "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n{bot_name}'s Persona: {memory}\n\n", 'prompt_template': '{prompt}<|eot_id|>', 'bot_template': '<|start_header_id|>assistant<|end_header_id|>\n\n{bot_name}: {message}<|eot_id|>', 'user_template': '<|start_header_id|>user<|end_header_id|>\n\n{user_name}: {message}<|eot_id|>', 'response_template': '<|start_header_id|>assistant<|end_header_id|>\n\n{bot_name}:', 'truncate_by_message': False}
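The formatter above is a Llama-3-Instruct style chat template. As a rough illustration, here is a minimal sketch of how these templates could be assembled into a single prompt string; the `render_prompt` helper, the conversation content, and the names are hypothetical and not part of the submission record.

```python
# Minimal sketch: assembling the formatter templates above into one prompt string.
# The helper, turn structure, and example content are hypothetical.
formatter = {
    "memory_template": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n{bot_name}'s Persona: {memory}\n\n",
    "prompt_template": "{prompt}<|eot_id|>",
    "bot_template": "<|start_header_id|>assistant<|end_header_id|>\n\n{bot_name}: {message}<|eot_id|>",
    "user_template": "<|start_header_id|>user<|end_header_id|>\n\n{user_name}: {message}<|eot_id|>",
    "response_template": "<|start_header_id|>assistant<|end_header_id|>\n\n{bot_name}:",
}

def render_prompt(memory, prompt, turns, bot_name, user_name):
    """Render system memory, scenario prompt, and chat turns into model input."""
    text = formatter["memory_template"].format(bot_name=bot_name, memory=memory)
    text += formatter["prompt_template"].format(prompt=prompt)
    for speaker, message in turns:  # turns: list of ("user" | "bot", message) pairs
        template = formatter["user_template"] if speaker == "user" else formatter["bot_template"]
        text += template.format(bot_name=bot_name, user_name=user_name, message=message)
    # The trailing response template cues the model to reply in character.
    return text + formatter["response_template"].format(bot_name=bot_name)

example = render_prompt(
    memory="A stoic guardian of the old library.",
    prompt="You meet Hathor at the gates.",
    turns=[("user", "Hello?")],
    bot_name="Hathor",
    user_name="Traveler",
)
print(example)
```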
generation_params: {'temperature': 0.95, 'top_p': 1.0, 'min_p': 0.8, 'top_k': 40, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'stopping_words': ['\n', '<|end_header_id|>,', '<|eot_id|>,', '\n\n{user_name}', 'You:', '\n\n'], 'max_input_tokens': 512, 'best_of': 16, 'max_output_tokens': 64, 'reward_max_token_input': 256}
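The generation_params map directly onto standard sampling settings. Below is a minimal sketch that passes them to a vLLM-style backend; the serving stack, the use of `n=16` in place of the record's best_of, and the placeholder prompt are all assumptions, not the platform's actual inference pipeline.

```python
# Minimal sketch, assuming a vLLM-style backend (an assumption; the record does
# not specify the serving stack). Treat as illustrative only.
from vllm import LLM, SamplingParams

sampling = SamplingParams(
    temperature=0.95,
    top_p=1.0,
    min_p=0.8,
    top_k=40,
    presence_penalty=0.0,
    frequency_penalty=0.0,
    # Stop strings copied verbatim from the record; "{user_name}" would
    # presumably be substituted at request time (an assumption).
    stop=["\n", "<|end_header_id|>,", "<|eot_id|>,", "\n\n{user_name}", "You:", "\n\n"],
    max_tokens=64,  # max_output_tokens
    n=16,           # sample 16 candidates; the record's best_of=16 selection is
                    # presumably done by the separate reward model (assumption)
)

llm = LLM(model="Nitral-AI/Hathor_Tahsin-L3-8B-v0.9")
# The real prompt would be rendered with the formatter above and truncated to
# max_input_tokens=512 upstream; a placeholder string stands in here.
outputs = llm.generate(["<|begin_of_text|>..."], sampling)
for candidate in outputs[0].outputs:
    print(repr(candidate.text))
```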
is_internal_developer: False
language_model: Nitral-AI/Hathor_Tahsin-L3-8B-v0.9
max_input_tokens: 512
max_output_tokens: 64
model_architecture: LlamaForCausalLM
model_group: Nitral-AI/Hathor_Tahsin-
model_name: Hathor_Tahsin-L3-8B-v0-9
model_num_parameters: 8030261248.0
model_repo: Nitral-AI/Hathor_Tahsin-L3-8B-v0.9
model_size: 8B
num_battles: 11446
num_wins: 5752
propriety_score: 0.7121513944223108
propriety_total_count: 1004.0
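If propriety_score is the fraction of the propriety_total_count samples judged proper (an interpretation, not stated in the record), the values above correspond to 715 / 1004 ≈ 0.71215.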
ranking_group: single
reward_formatter: {'bot_template': '{bot_name}: {message}\n', 'memory_template': "{bot_name}'s Persona: {memory}\n####\n", 'prompt_template': '{prompt}\n<START>\n', 'response_template': '{bot_name}:', 'truncate_by_message': False, 'user_template': '{user_name}: {message}\n'}
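The reward_formatter renders the same conversation in a plain-text layout for the reward model listed in reward_repo below. The sketch that follows only shows this rendering step; how the pairwise reward model actually tokenizes and scores the text (truncated to reward_max_token_input=256 tokens) is not shown, and the helper and names are hypothetical.

```python
# Minimal sketch of the reward-side rendering; scoring by the pairwise reward
# model in reward_repo is not shown here.
reward_formatter = {
    "memory_template": "{bot_name}'s Persona: {memory}\n####\n",
    "prompt_template": "{prompt}\n<START>\n",
    "bot_template": "{bot_name}: {message}\n",
    "user_template": "{user_name}: {message}\n",
    "response_template": "{bot_name}:",
}

def render_for_reward(memory, prompt, turns, candidate, bot_name, user_name):
    """Render the conversation plus one candidate reply in the reward model's plain-text format."""
    text = reward_formatter["memory_template"].format(bot_name=bot_name, memory=memory)
    text += reward_formatter["prompt_template"].format(prompt=prompt)
    for speaker, message in turns:
        template = reward_formatter["user_template"] if speaker == "user" else reward_formatter["bot_template"]
        text += template.format(bot_name=bot_name, user_name=user_name, message=message)
    # Each of the sampled candidates is appended after the response template
    # before being scored; truncation to 256 tokens happens upstream (assumption).
    return text + reward_formatter["response_template"].format(bot_name=bot_name) + " " + candidate
```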
reward_repo: Jellywibble/gpt2_xl_pairwise_89m_step_347634
status: torndown
submission_type: basic
timestamp: 2024-08-10T10:01:21+00:00
us_pacific_date: 2024-08-10
win_ratio: 0.5025336362047877
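Consistency check: win_ratio = num_wins / num_battles = 5752 / 11446 ≈ 0.50253, matching the value above.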