submission_id: nitral-ai-sekhmet-tahsin_7886_v1
developer_uid: Nitral-AI
alignment_samples: 0
best_of: 16
celo_rating: 1226.62
display_name: Sekhmet_Tahsin-L3-1-8B-v0-4
formatter: {'memory_template': "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n{bot_name}'s Persona: {memory}\n\n", 'prompt_template': '{prompt}<|eot_id|>', 'bot_template': '<|start_header_id|>assistant<|end_header_id|>\n\n{bot_name}: {message}<|eot_id|>', 'user_template': '<|start_header_id|>user<|end_header_id|>\n\n{user_name}: {message}<|eot_id|>', 'response_template': '<|start_header_id|>assistant<|end_header_id|>\n\n{bot_name}:', 'truncate_by_message': False}
generation_params: {'temperature': 1.2, 'top_p': 1.0, 'min_p': 0.1, 'top_k': 40, 'presence_penalty': 0.0, 'frequency_penalty': 0.0, 'stopping_words': ['\n', '<|end_header_id|>,', '<|eot_id|>,', '\n\n{user_name}', 'You:', '\n\n'], 'max_input_tokens': 512, 'best_of': 16, 'max_output_tokens': 64, 'reward_max_token_input': 256}
is_internal_developer: false
language_model: Nitral-AI/Sekhmet_Tahsin-L3.1-8B-v0.4
max_input_tokens: 512
max_output_tokens: 64
model_architecture: LlamaForCausalLM
model_group: Nitral-AI/Sekhmet_Tahsin
model_name: Sekhmet_Tahsin-L3-1-8B-v0-4
model_num_parameters: 8030261248.0
model_repo: Nitral-AI/Sekhmet_Tahsin-L3.1-8B-v0.4
model_size: 8B
num_battles: 15850
num_wins: 8099
propriety_score: 0.718772826880935
propriety_total_count: 1369.0
ranking_group: single
reward_formatter: {'bot_template': '{bot_name}: {message}\n', 'memory_template': "{bot_name}'s Persona: {memory}\n####\n", 'prompt_template': '{prompt}\n<START>\n', 'response_template': '{bot_name}:', 'truncate_by_message': False, 'user_template': '{user_name}: {message}\n'}
reward_repo: Jellywibble/gpt2_xl_pairwise_89m_step_347634
status: torndown
submission_type: basic
timestamp: "2024-08-07T20:39:00+00:00"
us_pacific_date: "2024-08-07"
win_ratio: 0.5109779179810725
# Resubmit model