{"description":"import torch\nimport torch.nn as nn\n\n# --- \u30e2\u30c7\u30eb\u5b9a\u7fa9 ---\nclass Model(nn.Module):\n    def __init__(self, input_size, hidden_size, output_size):\n        super().__init__()\n        self.linear1 = nn.Linear(input_size, hidden_size)\n        self.layer_norm1 = nn.LayerNorm(hidden_size)\n        self.linear2 = nn.Linear(hidden_size, hidden_size)\n        self\u2026","blog_title":"HTN20190109\u306e\u65e5\u8a18","url":"https://htn20190109.hatenablog.com/entry/2025/11/30/155553","author_url":"https://blog.hatena.ne.jp/HTN20190109/","provider_name":"Hatena Blog","blog_url":"https://htn20190109.hatenablog.com/","published":"2025-11-30 15:55:53","title":"LayerNorm","categories":["DL"],"html":"<iframe src=\"https://hatenablog-parts.com/embed?url=https%3A%2F%2Fhtn20190109.hatenablog.com%2Fentry%2F2025%2F11%2F30%2F155553\" title=\"LayerNorm - HTN20190109\u306e\u65e5\u8a18\" class=\"embed-card embed-blogcard\" scrolling=\"no\" frameborder=\"0\" style=\"display: block; width: 100%; height: 190px; max-width: 500px; margin: 10px 0px;\"></iframe>","image_url":null,"type":"rich","provider_url":"https://hatena.blog","height":"190","author_name":"HTN20190109","version":"1.0","width":"100%"}